# <!DOCTYPE html>  -- stray notebook-export artifact, commented out so the file parses
import pandas as pd #for manipulating the csv data
pd.set_option('display.max_rows', None)
import numpy as np #for mathematical calculation
from sklearn.metrics import confusion_matrix, accuracy_score, classification_report
from sklearn.model_selection import train_test_split
from math import log2, e, log, exp,log
import sys, threading
from sklearn.impute import KNNImputer
sys.setrecursionlimit(10**7) # max depth of recursion
threading.stack_size(2**27) # new thread will get stack of such size
# 0  -- stray notebook cell output, commented out
def nulling(df_in, col_name):
    """Turn zero entries of *col_name* into missing values (in place).

    In this dataset a 0 in the clinical columns encodes "measurement
    missing", so zeros are converted to NaN for the KNN imputer to fill.
    Returns the same (mutated) DataFrame so calls can be chained.
    """
    # Vectorized mask instead of the original per-row Python loop; this also
    # avoids the positional-vs-label mismatch of df.at[i, ...] (enumerate
    # yields positions, .at expects index labels) for non-RangeIndex frames.
    df_in[col_name] = df_in[col_name].mask(df_in[col_name] == 0)
    return df_in
def rounding(df_in, col_name, r):
    """Round every value of *col_name* to *r* decimal places (in place).

    Returns the same (mutated) DataFrame so calls can be chained.
    """
    # Series.round replaces the original per-row loop, which again mixed
    # enumerate positions with .at label indexing.
    df_in[col_name] = df_in[col_name].round(r)
    return df_in
def impute_knn(df_in):
    """KNN-impute missing values over the whole frame (3 nearest donors).

    Returns a new DataFrame with the caller's columns AND index preserved.
    """
    imputer = KNNImputer(n_neighbors=3)
    imputed = imputer.fit_transform(df_in)
    # Preserve the caller's index: the original rebuilt the frame with a
    # default 0..n-1 index, silently misaligning rows for any frame whose
    # index is not already a RangeIndex.
    return pd.DataFrame(imputed, index=df_in.index, columns=df_in.columns)
def impute_knnn(df_in):
    """KNN-impute missing values separately within each Outcome class.

    Fitting the imputer per class restricts a row's donor neighbours to rows
    with the same label. The original row order is restored via sort_index.
    """
    imputer = KNNImputer(n_neighbors=3)
    per_class = []
    for label in (0, 1):
        subset = df_in[df_in['Outcome'] == label]
        filled = imputer.fit_transform(subset)
        per_class.append(
            pd.DataFrame(filled, index=subset.index, columns=df_in.columns)
        )
    return pd.concat(per_class).sort_index()
def praproses(daf, knn=1):
    """Preprocess the diabetes frame: optional KNN imputation, then rounding.

    When knn == 1 (default), zeros in the clinical columns are treated as
    missing and filled per-Outcome-class with KNN; the listed columns are
    then rounded to fixed precisions either way. Works on a copy — *daf*
    is never mutated. Returns the processed copy.
    """
    df = daf.copy()
    # Columns where a literal 0 means "measurement missing".
    zero_as_missing = ['Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI']
    # Decimal places to round each column to after imputation.
    decimals = {
        'Glucose': 0,
        'Pregnancies': 0,
        'BloodPressure': 0,
        'SkinThickness': 0,
        'Insulin': 0,
        'BMI': 1,
    }
    if knn == 1:
        # df.columns instead of the original df.head(): iterating head()
        # built a 5-row DataFrame purely to enumerate column names.
        for col in df.columns:
            if col in zero_as_missing:
                df = nulling(df, col)
        df = impute_knnn(df)
    for col in df.columns:
        if col in decimals:
            df = rounding(df, col, decimals[col])
    return df
def get_label_v(labels):
    """Summarize *labels*: (unique values, counts, total, probabilities).

    Always returns a 4-tuple. The original returned the scalar 0 when
    len(labels) <= 1, which crashed every caller that unpacks four values;
    now a single-element input is summarized normally and an empty input
    yields empty arrays with total 0.
    """
    n_labels = len(labels)
    if n_labels == 0:
        empty = np.array([])
        return empty, empty.astype(int), 0, empty
    value, counts = np.unique(labels, return_counts=True)
    return value, counts, n_labels, counts / n_labels
def entropy(probs):
    """Shannon entropy (in bits) of a probability distribution *probs*.

    Zero-probability terms are skipped — lim p->0 of p*log2(p) is 0 — instead
    of letting log2(0) raise and having a bare `except` discard the whole
    partial sum, as the original did. Non-iterable input (e.g. the scalar 0
    some callers may pass) still yields 0 to keep the original fallback.
    """
    try:
        return -sum(p * log2(p) for p in probs if p > 0)
    except TypeError:
        # probs was not iterable; degenerate input contributes no entropy.
        return 0
def ign_zero(x, y):
    """Return x / y, or 0 when y is zero (safe division for proportions)."""
    # Narrowed from a bare `except`, which also hid TypeErrors and other bugs.
    try:
        return x / y
    except ZeroDivisionError:
        return 0
def selecter(val, df, col):
    """Split *df* on df[col] <= val and count Outcome classes on each side.

    Returns (sk, sk0, sk1, sl, sl0, sl1, pskt, pslt): sk/sl are the sizes
    of the <=val / >val partitions, skX/slX the per-Outcome counts inside
    each, and pskt/pslt the class-proportion pairs ([0, 0] when a partition
    is empty).
    """
    below = df[col] <= val
    neg = df['Outcome'] == 0
    pos = df['Outcome'] == 1
    # Boolean-mask sums replace the original's .filter(col) calls, which
    # were no-ops: len() counts rows regardless of which columns survive.
    sk = int(below.sum())
    sl = len(df) - sk
    sk0 = int((below & neg).sum())
    sk1 = int((below & pos).sum())
    sl0 = int((~below & neg).sum())
    sl1 = int((~below & pos).sum())
    # Inline safe division (0 for an empty partition).
    pskt = [sk0 / sk, sk1 / sk] if sk else [0, 0]
    pslt = [sl0 / sl, sl1 / sl] if sl else [0, 0]
    return sk, sk0, sk1, sl, sl0, sl1, pskt, pslt
def get_max(df):
    """Return (best_gain, best_value) over a list of [value, gain] pairs.

    The first pair with the strictly greatest gain wins; an empty input
    yields the (-1, 0) sentinel.
    """
    best_gain, best_value = -1, 0
    for candidate_value, gain in df:
        if gain > best_gain:
            best_gain, best_value = gain, candidate_value
    return best_gain, best_value
def diskritisasi(df, l_data, l_col):
    """Pick the best binary split threshold for feature *l_col*.

    Scores every unique value of df[l_col] as a candidate threshold by the
    information gain of the (<= value / > value) partition w.r.t. the label
    column *l_data*, and returns get_max's (best_gain, best_threshold).
    """
    # The dataset entropy does not depend on the candidate threshold, so
    # compute it once — the original recomputed it on every loop iteration.
    _, _, n_data, pdata = get_label_v(df[l_data])
    entt = entropy(pdata)
    value, counts, n_val, probs = get_label_v(df[l_col])
    scored = []
    for candidate in value:
        sk, _, _, sl, _, _, pskt, pslt = selecter(candidate, df, l_col)
        epskt = entropy(pskt)
        epslt = entropy(pslt)
        gain = entt - (sk / n_data * epskt + sl / n_data * epslt)
        scored.append([candidate, gain])
    return get_max(scored)
def helper_disk(df, l_data):
    """Compute the best (gain, threshold) split for every feature column.

    Skips the label column *l_data*; returns {column: diskritisasi result}.
    """
    # df.columns instead of iterating df.head(): the original built a 5-row
    # DataFrame purely to enumerate the column names.
    return {col: diskritisasi(df, l_data, col) for col in df.columns if col != l_data}
def tosplt(df, sp):
    """Discretize each column of *df* named in *sp* to two values (in place).

    *sp* maps a column name to a (gain, threshold) pair, as produced by
    helper_disk. Values <= threshold become the threshold itself; values
    above it become threshold + 1. Returns the mutated frame.
    """
    # The original also called df.drop(columns=[i]) and discarded the
    # result — dead code, removed. np.where replaces the per-row list comp.
    for col, (_gain, threshold) in sp.items():
        df[col] = np.where(df[col] <= threshold, threshold, threshold + 1)
    return df
def runAll(dataa):
    """Full pipeline: preprocess with KNN imputation, find the per-feature
    split thresholds (printed for inspection), and discretize every column.

    Works on copies; *dataa* is never mutated. Returns the discretized frame.
    """
    prepared = praproses(dataa.copy())
    split_points = helper_disk(prepared, 'Outcome')
    print(split_points)
    return tosplt(prepared.copy(), split_points)
def runNoknn(dataa):
    """Pipeline variant that skips KNN imputation: preprocess (rounding
    only), find split thresholds, and discretize every column.

    Works on copies; *dataa* is never mutated. Returns the discretized frame.
    """
    prepared = praproses(dataa.copy(), 0)
    split_points = helper_disk(prepared, 'Outcome')
    return tosplt(prepared.copy(), split_points)
# Load the Pima Indians diabetes dataset; expects diabetes.csv next to this
# script. Columns seen in the dump below: Pregnancies, Glucose,
# BloodPressure, SkinThickness, Insulin, BMI, DiabetesPedigreeFunction,
# Age, Outcome.
raw_data = pd.read_csv('diabetes.csv')
# Bare expression: notebook-style display of the loaded frame (no effect
# when run as a plain script).
raw_data
Pregnancies | Glucose | BloodPressure | SkinThickness | Insulin | BMI | DiabetesPedigreeFunction | Age | Outcome | |
---|---|---|---|---|---|---|---|---|---|
0 | 6 | 148 | 72 | 35 | 0 | 33.6 | 0.627 | 50 | 1 |
1 | 1 | 85 | 66 | 29 | 0 | 26.6 | 0.351 | 31 | 0 |
2 | 8 | 183 | 64 | 0 | 0 | 23.3 | 0.672 | 32 | 1 |
3 | 1 | 89 | 66 | 23 | 94 | 28.1 | 0.167 | 21 | 0 |
4 | 0 | 137 | 40 | 35 | 168 | 43.1 | 2.288 | 33 | 1 |
5 | 5 | 116 | 74 | 0 | 0 | 25.6 | 0.201 | 30 | 0 |
6 | 3 | 78 | 50 | 32 | 88 | 31.0 | 0.248 | 26 | 1 |
7 | 10 | 115 | 0 | 0 | 0 | 35.3 | 0.134 | 29 | 0 |
8 | 2 | 197 | 70 | 45 | 543 | 30.5 | 0.158 | 53 | 1 |
9 | 8 | 125 | 96 | 0 | 0 | 0.0 | 0.232 | 54 | 1 |
10 | 4 | 110 | 92 | 0 | 0 | 37.6 | 0.191 | 30 | 0 |
11 | 10 | 168 | 74 | 0 | 0 | 38.0 | 0.537 | 34 | 1 |
12 | 10 | 139 | 80 | 0 | 0 | 27.1 | 1.441 | 57 | 0 |
13 | 1 | 189 | 60 | 23 | 846 | 30.1 | 0.398 | 59 | 1 |
14 | 5 | 166 | 72 | 19 | 175 | 25.8 | 0.587 | 51 | 1 |
15 | 7 | 100 | 0 | 0 | 0 | 30.0 | 0.484 | 32 | 1 |
16 | 0 | 118 | 84 | 47 | 230 | 45.8 | 0.551 | 31 | 1 |
17 | 7 | 107 | 74 | 0 | 0 | 29.6 | 0.254 | 31 | 1 |
18 | 1 | 103 | 30 | 38 | 83 | 43.3 | 0.183 | 33 | 0 |
19 | 1 | 115 | 70 | 30 | 96 | 34.6 | 0.529 | 32 | 1 |
20 | 3 | 126 | 88 | 41 | 235 | 39.3 | 0.704 | 27 | 0 |
21 | 8 | 99 | 84 | 0 | 0 | 35.4 | 0.388 | 50 | 0 |
22 | 7 | 196 | 90 | 0 | 0 | 39.8 | 0.451 | 41 | 1 |
23 | 9 | 119 | 80 | 35 | 0 | 29.0 | 0.263 | 29 | 1 |
24 | 11 | 143 | 94 | 33 | 146 | 36.6 | 0.254 | 51 | 1 |
25 | 10 | 125 | 70 | 26 | 115 | 31.1 | 0.205 | 41 | 1 |
26 | 7 | 147 | 76 | 0 | 0 | 39.4 | 0.257 | 43 | 1 |
27 | 1 | 97 | 66 | 15 | 140 | 23.2 | 0.487 | 22 | 0 |
28 | 13 | 145 | 82 | 19 | 110 | 22.2 | 0.245 | 57 | 0 |
29 | 5 | 117 | 92 | 0 | 0 | 34.1 | 0.337 | 38 | 0 |
30 | 5 | 109 | 75 | 26 | 0 | 36.0 | 0.546 | 60 | 0 |
31 | 3 | 158 | 76 | 36 | 245 | 31.6 | 0.851 | 28 | 1 |
32 | 3 | 88 | 58 | 11 | 54 | 24.8 | 0.267 | 22 | 0 |
33 | 6 | 92 | 92 | 0 | 0 | 19.9 | 0.188 | 28 | 0 |
34 | 10 | 122 | 78 | 31 | 0 | 27.6 | 0.512 | 45 | 0 |
35 | 4 | 103 | 60 | 33 | 192 | 24.0 | 0.966 | 33 | 0 |
36 | 11 | 138 | 76 | 0 | 0 | 33.2 | 0.420 | 35 | 0 |
37 | 9 | 102 | 76 | 37 | 0 | 32.9 | 0.665 | 46 | 1 |
38 | 2 | 90 | 68 | 42 | 0 | 38.2 | 0.503 | 27 | 1 |
39 | 4 | 111 | 72 | 47 | 207 | 37.1 | 1.390 | 56 | 1 |
40 | 3 | 180 | 64 | 25 | 70 | 34.0 | 0.271 | 26 | 0 |
41 | 7 | 133 | 84 | 0 | 0 | 40.2 | 0.696 | 37 | 0 |
42 | 7 | 106 | 92 | 18 | 0 | 22.7 | 0.235 | 48 | 0 |
43 | 9 | 171 | 110 | 24 | 240 | 45.4 | 0.721 | 54 | 1 |
44 | 7 | 159 | 64 | 0 | 0 | 27.4 | 0.294 | 40 | 0 |
45 | 0 | 180 | 66 | 39 | 0 | 42.0 | 1.893 | 25 | 1 |
46 | 1 | 146 | 56 | 0 | 0 | 29.7 | 0.564 | 29 | 0 |
47 | 2 | 71 | 70 | 27 | 0 | 28.0 | 0.586 | 22 | 0 |
48 | 7 | 103 | 66 | 32 | 0 | 39.1 | 0.344 | 31 | 1 |
49 | 7 | 105 | 0 | 0 | 0 | 0.0 | 0.305 | 24 | 0 |
50 | 1 | 103 | 80 | 11 | 82 | 19.4 | 0.491 | 22 | 0 |
51 | 1 | 101 | 50 | 15 | 36 | 24.2 | 0.526 | 26 | 0 |
52 | 5 | 88 | 66 | 21 | 23 | 24.4 | 0.342 | 30 | 0 |
53 | 8 | 176 | 90 | 34 | 300 | 33.7 | 0.467 | 58 | 1 |
54 | 7 | 150 | 66 | 42 | 342 | 34.7 | 0.718 | 42 | 0 |
55 | 1 | 73 | 50 | 10 | 0 | 23.0 | 0.248 | 21 | 0 |
56 | 7 | 187 | 68 | 39 | 304 | 37.7 | 0.254 | 41 | 1 |
57 | 0 | 100 | 88 | 60 | 110 | 46.8 | 0.962 | 31 | 0 |
58 | 0 | 146 | 82 | 0 | 0 | 40.5 | 1.781 | 44 | 0 |
59 | 0 | 105 | 64 | 41 | 142 | 41.5 | 0.173 | 22 | 0 |
60 | 2 | 84 | 0 | 0 | 0 | 0.0 | 0.304 | 21 | 0 |
61 | 8 | 133 | 72 | 0 | 0 | 32.9 | 0.270 | 39 | 1 |
62 | 5 | 44 | 62 | 0 | 0 | 25.0 | 0.587 | 36 | 0 |
63 | 2 | 141 | 58 | 34 | 128 | 25.4 | 0.699 | 24 | 0 |
64 | 7 | 114 | 66 | 0 | 0 | 32.8 | 0.258 | 42 | 1 |
65 | 5 | 99 | 74 | 27 | 0 | 29.0 | 0.203 | 32 | 0 |
66 | 0 | 109 | 88 | 30 | 0 | 32.5 | 0.855 | 38 | 1 |
67 | 2 | 109 | 92 | 0 | 0 | 42.7 | 0.845 | 54 | 0 |
68 | 1 | 95 | 66 | 13 | 38 | 19.6 | 0.334 | 25 | 0 |
69 | 4 | 146 | 85 | 27 | 100 | 28.9 | 0.189 | 27 | 0 |
70 | 2 | 100 | 66 | 20 | 90 | 32.9 | 0.867 | 28 | 1 |
71 | 5 | 139 | 64 | 35 | 140 | 28.6 | 0.411 | 26 | 0 |
72 | 13 | 126 | 90 | 0 | 0 | 43.4 | 0.583 | 42 | 1 |
73 | 4 | 129 | 86 | 20 | 270 | 35.1 | 0.231 | 23 | 0 |
74 | 1 | 79 | 75 | 30 | 0 | 32.0 | 0.396 | 22 | 0 |
75 | 1 | 0 | 48 | 20 | 0 | 24.7 | 0.140 | 22 | 0 |
76 | 7 | 62 | 78 | 0 | 0 | 32.6 | 0.391 | 41 | 0 |
77 | 5 | 95 | 72 | 33 | 0 | 37.7 | 0.370 | 27 | 0 |
78 | 0 | 131 | 0 | 0 | 0 | 43.2 | 0.270 | 26 | 1 |
79 | 2 | 112 | 66 | 22 | 0 | 25.0 | 0.307 | 24 | 0 |
80 | 3 | 113 | 44 | 13 | 0 | 22.4 | 0.140 | 22 | 0 |
81 | 2 | 74 | 0 | 0 | 0 | 0.0 | 0.102 | 22 | 0 |
82 | 7 | 83 | 78 | 26 | 71 | 29.3 | 0.767 | 36 | 0 |
83 | 0 | 101 | 65 | 28 | 0 | 24.6 | 0.237 | 22 | 0 |
84 | 5 | 137 | 108 | 0 | 0 | 48.8 | 0.227 | 37 | 1 |
85 | 2 | 110 | 74 | 29 | 125 | 32.4 | 0.698 | 27 | 0 |
86 | 13 | 106 | 72 | 54 | 0 | 36.6 | 0.178 | 45 | 0 |
87 | 2 | 100 | 68 | 25 | 71 | 38.5 | 0.324 | 26 | 0 |
88 | 15 | 136 | 70 | 32 | 110 | 37.1 | 0.153 | 43 | 1 |
89 | 1 | 107 | 68 | 19 | 0 | 26.5 | 0.165 | 24 | 0 |
90 | 1 | 80 | 55 | 0 | 0 | 19.1 | 0.258 | 21 | 0 |
91 | 4 | 123 | 80 | 15 | 176 | 32.0 | 0.443 | 34 | 0 |
92 | 7 | 81 | 78 | 40 | 48 | 46.7 | 0.261 | 42 | 0 |
93 | 4 | 134 | 72 | 0 | 0 | 23.8 | 0.277 | 60 | 1 |
94 | 2 | 142 | 82 | 18 | 64 | 24.7 | 0.761 | 21 | 0 |
95 | 6 | 144 | 72 | 27 | 228 | 33.9 | 0.255 | 40 | 0 |
96 | 2 | 92 | 62 | 28 | 0 | 31.6 | 0.130 | 24 | 0 |
97 | 1 | 71 | 48 | 18 | 76 | 20.4 | 0.323 | 22 | 0 |
98 | 6 | 93 | 50 | 30 | 64 | 28.7 | 0.356 | 23 | 0 |
99 | 1 | 122 | 90 | 51 | 220 | 49.7 | 0.325 | 31 | 1 |
100 | 1 | 163 | 72 | 0 | 0 | 39.0 | 1.222 | 33 | 1 |
101 | 1 | 151 | 60 | 0 | 0 | 26.1 | 0.179 | 22 | 0 |
102 | 0 | 125 | 96 | 0 | 0 | 22.5 | 0.262 | 21 | 0 |
103 | 1 | 81 | 72 | 18 | 40 | 26.6 | 0.283 | 24 | 0 |
104 | 2 | 85 | 65 | 0 | 0 | 39.6 | 0.930 | 27 | 0 |
105 | 1 | 126 | 56 | 29 | 152 | 28.7 | 0.801 | 21 | 0 |
106 | 1 | 96 | 122 | 0 | 0 | 22.4 | 0.207 | 27 | 0 |
107 | 4 | 144 | 58 | 28 | 140 | 29.5 | 0.287 | 37 | 0 |
108 | 3 | 83 | 58 | 31 | 18 | 34.3 | 0.336 | 25 | 0 |
109 | 0 | 95 | 85 | 25 | 36 | 37.4 | 0.247 | 24 | 1 |
110 | 3 | 171 | 72 | 33 | 135 | 33.3 | 0.199 | 24 | 1 |
111 | 8 | 155 | 62 | 26 | 495 | 34.0 | 0.543 | 46 | 1 |
112 | 1 | 89 | 76 | 34 | 37 | 31.2 | 0.192 | 23 | 0 |
113 | 4 | 76 | 62 | 0 | 0 | 34.0 | 0.391 | 25 | 0 |
114 | 7 | 160 | 54 | 32 | 175 | 30.5 | 0.588 | 39 | 1 |
115 | 4 | 146 | 92 | 0 | 0 | 31.2 | 0.539 | 61 | 1 |
116 | 5 | 124 | 74 | 0 | 0 | 34.0 | 0.220 | 38 | 1 |
117 | 5 | 78 | 48 | 0 | 0 | 33.7 | 0.654 | 25 | 0 |
118 | 4 | 97 | 60 | 23 | 0 | 28.2 | 0.443 | 22 | 0 |
119 | 4 | 99 | 76 | 15 | 51 | 23.2 | 0.223 | 21 | 0 |
120 | 0 | 162 | 76 | 56 | 100 | 53.2 | 0.759 | 25 | 1 |
121 | 6 | 111 | 64 | 39 | 0 | 34.2 | 0.260 | 24 | 0 |
122 | 2 | 107 | 74 | 30 | 100 | 33.6 | 0.404 | 23 | 0 |
123 | 5 | 132 | 80 | 0 | 0 | 26.8 | 0.186 | 69 | 0 |
124 | 0 | 113 | 76 | 0 | 0 | 33.3 | 0.278 | 23 | 1 |
125 | 1 | 88 | 30 | 42 | 99 | 55.0 | 0.496 | 26 | 1 |
126 | 3 | 120 | 70 | 30 | 135 | 42.9 | 0.452 | 30 | 0 |
127 | 1 | 118 | 58 | 36 | 94 | 33.3 | 0.261 | 23 | 0 |
128 | 1 | 117 | 88 | 24 | 145 | 34.5 | 0.403 | 40 | 1 |
129 | 0 | 105 | 84 | 0 | 0 | 27.9 | 0.741 | 62 | 1 |
130 | 4 | 173 | 70 | 14 | 168 | 29.7 | 0.361 | 33 | 1 |
131 | 9 | 122 | 56 | 0 | 0 | 33.3 | 1.114 | 33 | 1 |
132 | 3 | 170 | 64 | 37 | 225 | 34.5 | 0.356 | 30 | 1 |
133 | 8 | 84 | 74 | 31 | 0 | 38.3 | 0.457 | 39 | 0 |
134 | 2 | 96 | 68 | 13 | 49 | 21.1 | 0.647 | 26 | 0 |
135 | 2 | 125 | 60 | 20 | 140 | 33.8 | 0.088 | 31 | 0 |
136 | 0 | 100 | 70 | 26 | 50 | 30.8 | 0.597 | 21 | 0 |
137 | 0 | 93 | 60 | 25 | 92 | 28.7 | 0.532 | 22 | 0 |
138 | 0 | 129 | 80 | 0 | 0 | 31.2 | 0.703 | 29 | 0 |
139 | 5 | 105 | 72 | 29 | 325 | 36.9 | 0.159 | 28 | 0 |
140 | 3 | 128 | 78 | 0 | 0 | 21.1 | 0.268 | 55 | 0 |
141 | 5 | 106 | 82 | 30 | 0 | 39.5 | 0.286 | 38 | 0 |
142 | 2 | 108 | 52 | 26 | 63 | 32.5 | 0.318 | 22 | 0 |
143 | 10 | 108 | 66 | 0 | 0 | 32.4 | 0.272 | 42 | 1 |
144 | 4 | 154 | 62 | 31 | 284 | 32.8 | 0.237 | 23 | 0 |
145 | 0 | 102 | 75 | 23 | 0 | 0.0 | 0.572 | 21 | 0 |
146 | 9 | 57 | 80 | 37 | 0 | 32.8 | 0.096 | 41 | 0 |
147 | 2 | 106 | 64 | 35 | 119 | 30.5 | 1.400 | 34 | 0 |
148 | 5 | 147 | 78 | 0 | 0 | 33.7 | 0.218 | 65 | 0 |
149 | 2 | 90 | 70 | 17 | 0 | 27.3 | 0.085 | 22 | 0 |
150 | 1 | 136 | 74 | 50 | 204 | 37.4 | 0.399 | 24 | 0 |
151 | 4 | 114 | 65 | 0 | 0 | 21.9 | 0.432 | 37 | 0 |
152 | 9 | 156 | 86 | 28 | 155 | 34.3 | 1.189 | 42 | 1 |
153 | 1 | 153 | 82 | 42 | 485 | 40.6 | 0.687 | 23 | 0 |
154 | 8 | 188 | 78 | 0 | 0 | 47.9 | 0.137 | 43 | 1 |
155 | 7 | 152 | 88 | 44 | 0 | 50.0 | 0.337 | 36 | 1 |
156 | 2 | 99 | 52 | 15 | 94 | 24.6 | 0.637 | 21 | 0 |
157 | 1 | 109 | 56 | 21 | 135 | 25.2 | 0.833 | 23 | 0 |
158 | 2 | 88 | 74 | 19 | 53 | 29.0 | 0.229 | 22 | 0 |
159 | 17 | 163 | 72 | 41 | 114 | 40.9 | 0.817 | 47 | 1 |
160 | 4 | 151 | 90 | 38 | 0 | 29.7 | 0.294 | 36 | 0 |
161 | 7 | 102 | 74 | 40 | 105 | 37.2 | 0.204 | 45 | 0 |
162 | 0 | 114 | 80 | 34 | 285 | 44.2 | 0.167 | 27 | 0 |
163 | 2 | 100 | 64 | 23 | 0 | 29.7 | 0.368 | 21 | 0 |
164 | 0 | 131 | 88 | 0 | 0 | 31.6 | 0.743 | 32 | 1 |
165 | 6 | 104 | 74 | 18 | 156 | 29.9 | 0.722 | 41 | 1 |
166 | 3 | 148 | 66 | 25 | 0 | 32.5 | 0.256 | 22 | 0 |
167 | 4 | 120 | 68 | 0 | 0 | 29.6 | 0.709 | 34 | 0 |
168 | 4 | 110 | 66 | 0 | 0 | 31.9 | 0.471 | 29 | 0 |
169 | 3 | 111 | 90 | 12 | 78 | 28.4 | 0.495 | 29 | 0 |
170 | 6 | 102 | 82 | 0 | 0 | 30.8 | 0.180 | 36 | 1 |
171 | 6 | 134 | 70 | 23 | 130 | 35.4 | 0.542 | 29 | 1 |
172 | 2 | 87 | 0 | 23 | 0 | 28.9 | 0.773 | 25 | 0 |
173 | 1 | 79 | 60 | 42 | 48 | 43.5 | 0.678 | 23 | 0 |
174 | 2 | 75 | 64 | 24 | 55 | 29.7 | 0.370 | 33 | 0 |
175 | 8 | 179 | 72 | 42 | 130 | 32.7 | 0.719 | 36 | 1 |
176 | 6 | 85 | 78 | 0 | 0 | 31.2 | 0.382 | 42 | 0 |
177 | 0 | 129 | 110 | 46 | 130 | 67.1 | 0.319 | 26 | 1 |
178 | 5 | 143 | 78 | 0 | 0 | 45.0 | 0.190 | 47 | 0 |
179 | 5 | 130 | 82 | 0 | 0 | 39.1 | 0.956 | 37 | 1 |
180 | 6 | 87 | 80 | 0 | 0 | 23.2 | 0.084 | 32 | 0 |
181 | 0 | 119 | 64 | 18 | 92 | 34.9 | 0.725 | 23 | 0 |
182 | 1 | 0 | 74 | 20 | 23 | 27.7 | 0.299 | 21 | 0 |
183 | 5 | 73 | 60 | 0 | 0 | 26.8 | 0.268 | 27 | 0 |
184 | 4 | 141 | 74 | 0 | 0 | 27.6 | 0.244 | 40 | 0 |
185 | 7 | 194 | 68 | 28 | 0 | 35.9 | 0.745 | 41 | 1 |
186 | 8 | 181 | 68 | 36 | 495 | 30.1 | 0.615 | 60 | 1 |
187 | 1 | 128 | 98 | 41 | 58 | 32.0 | 1.321 | 33 | 1 |
188 | 8 | 109 | 76 | 39 | 114 | 27.9 | 0.640 | 31 | 1 |
189 | 5 | 139 | 80 | 35 | 160 | 31.6 | 0.361 | 25 | 1 |
190 | 3 | 111 | 62 | 0 | 0 | 22.6 | 0.142 | 21 | 0 |
191 | 9 | 123 | 70 | 44 | 94 | 33.1 | 0.374 | 40 | 0 |
192 | 7 | 159 | 66 | 0 | 0 | 30.4 | 0.383 | 36 | 1 |
193 | 11 | 135 | 0 | 0 | 0 | 52.3 | 0.578 | 40 | 1 |
194 | 8 | 85 | 55 | 20 | 0 | 24.4 | 0.136 | 42 | 0 |
195 | 5 | 158 | 84 | 41 | 210 | 39.4 | 0.395 | 29 | 1 |
196 | 1 | 105 | 58 | 0 | 0 | 24.3 | 0.187 | 21 | 0 |
197 | 3 | 107 | 62 | 13 | 48 | 22.9 | 0.678 | 23 | 1 |
198 | 4 | 109 | 64 | 44 | 99 | 34.8 | 0.905 | 26 | 1 |
199 | 4 | 148 | 60 | 27 | 318 | 30.9 | 0.150 | 29 | 1 |
200 | 0 | 113 | 80 | 16 | 0 | 31.0 | 0.874 | 21 | 0 |
201 | 1 | 138 | 82 | 0 | 0 | 40.1 | 0.236 | 28 | 0 |
202 | 0 | 108 | 68 | 20 | 0 | 27.3 | 0.787 | 32 | 0 |
203 | 2 | 99 | 70 | 16 | 44 | 20.4 | 0.235 | 27 | 0 |
204 | 6 | 103 | 72 | 32 | 190 | 37.7 | 0.324 | 55 | 0 |
205 | 5 | 111 | 72 | 28 | 0 | 23.9 | 0.407 | 27 | 0 |
206 | 8 | 196 | 76 | 29 | 280 | 37.5 | 0.605 | 57 | 1 |
207 | 5 | 162 | 104 | 0 | 0 | 37.7 | 0.151 | 52 | 1 |
208 | 1 | 96 | 64 | 27 | 87 | 33.2 | 0.289 | 21 | 0 |
209 | 7 | 184 | 84 | 33 | 0 | 35.5 | 0.355 | 41 | 1 |
210 | 2 | 81 | 60 | 22 | 0 | 27.7 | 0.290 | 25 | 0 |
211 | 0 | 147 | 85 | 54 | 0 | 42.8 | 0.375 | 24 | 0 |
212 | 7 | 179 | 95 | 31 | 0 | 34.2 | 0.164 | 60 | 0 |
213 | 0 | 140 | 65 | 26 | 130 | 42.6 | 0.431 | 24 | 1 |
214 | 9 | 112 | 82 | 32 | 175 | 34.2 | 0.260 | 36 | 1 |
215 | 12 | 151 | 70 | 40 | 271 | 41.8 | 0.742 | 38 | 1 |
216 | 5 | 109 | 62 | 41 | 129 | 35.8 | 0.514 | 25 | 1 |
217 | 6 | 125 | 68 | 30 | 120 | 30.0 | 0.464 | 32 | 0 |
218 | 5 | 85 | 74 | 22 | 0 | 29.0 | 1.224 | 32 | 1 |
219 | 5 | 112 | 66 | 0 | 0 | 37.8 | 0.261 | 41 | 1 |
220 | 0 | 177 | 60 | 29 | 478 | 34.6 | 1.072 | 21 | 1 |
221 | 2 | 158 | 90 | 0 | 0 | 31.6 | 0.805 | 66 | 1 |
222 | 7 | 119 | 0 | 0 | 0 | 25.2 | 0.209 | 37 | 0 |
223 | 7 | 142 | 60 | 33 | 190 | 28.8 | 0.687 | 61 | 0 |
224 | 1 | 100 | 66 | 15 | 56 | 23.6 | 0.666 | 26 | 0 |
225 | 1 | 87 | 78 | 27 | 32 | 34.6 | 0.101 | 22 | 0 |
226 | 0 | 101 | 76 | 0 | 0 | 35.7 | 0.198 | 26 | 0 |
227 | 3 | 162 | 52 | 38 | 0 | 37.2 | 0.652 | 24 | 1 |
228 | 4 | 197 | 70 | 39 | 744 | 36.7 | 2.329 | 31 | 0 |
229 | 0 | 117 | 80 | 31 | 53 | 45.2 | 0.089 | 24 | 0 |
230 | 4 | 142 | 86 | 0 | 0 | 44.0 | 0.645 | 22 | 1 |
231 | 6 | 134 | 80 | 37 | 370 | 46.2 | 0.238 | 46 | 1 |
232 | 1 | 79 | 80 | 25 | 37 | 25.4 | 0.583 | 22 | 0 |
233 | 4 | 122 | 68 | 0 | 0 | 35.0 | 0.394 | 29 | 0 |
234 | 3 | 74 | 68 | 28 | 45 | 29.7 | 0.293 | 23 | 0 |
235 | 4 | 171 | 72 | 0 | 0 | 43.6 | 0.479 | 26 | 1 |
236 | 7 | 181 | 84 | 21 | 192 | 35.9 | 0.586 | 51 | 1 |
237 | 0 | 179 | 90 | 27 | 0 | 44.1 | 0.686 | 23 | 1 |
238 | 9 | 164 | 84 | 21 | 0 | 30.8 | 0.831 | 32 | 1 |
239 | 0 | 104 | 76 | 0 | 0 | 18.4 | 0.582 | 27 | 0 |
240 | 1 | 91 | 64 | 24 | 0 | 29.2 | 0.192 | 21 | 0 |
241 | 4 | 91 | 70 | 32 | 88 | 33.1 | 0.446 | 22 | 0 |
242 | 3 | 139 | 54 | 0 | 0 | 25.6 | 0.402 | 22 | 1 |
243 | 6 | 119 | 50 | 22 | 176 | 27.1 | 1.318 | 33 | 1 |
244 | 2 | 146 | 76 | 35 | 194 | 38.2 | 0.329 | 29 | 0 |
245 | 9 | 184 | 85 | 15 | 0 | 30.0 | 1.213 | 49 | 1 |
246 | 10 | 122 | 68 | 0 | 0 | 31.2 | 0.258 | 41 | 0 |
247 | 0 | 165 | 90 | 33 | 680 | 52.3 | 0.427 | 23 | 0 |
248 | 9 | 124 | 70 | 33 | 402 | 35.4 | 0.282 | 34 | 0 |
249 | 1 | 111 | 86 | 19 | 0 | 30.1 | 0.143 | 23 | 0 |
250 | 9 | 106 | 52 | 0 | 0 | 31.2 | 0.380 | 42 | 0 |
251 | 2 | 129 | 84 | 0 | 0 | 28.0 | 0.284 | 27 | 0 |
252 | 2 | 90 | 80 | 14 | 55 | 24.4 | 0.249 | 24 | 0 |
253 | 0 | 86 | 68 | 32 | 0 | 35.8 | 0.238 | 25 | 0 |
254 | 12 | 92 | 62 | 7 | 258 | 27.6 | 0.926 | 44 | 1 |
255 | 1 | 113 | 64 | 35 | 0 | 33.6 | 0.543 | 21 | 1 |
256 | 3 | 111 | 56 | 39 | 0 | 30.1 | 0.557 | 30 | 0 |
257 | 2 | 114 | 68 | 22 | 0 | 28.7 | 0.092 | 25 | 0 |
258 | 1 | 193 | 50 | 16 | 375 | 25.9 | 0.655 | 24 | 0 |
259 | 11 | 155 | 76 | 28 | 150 | 33.3 | 1.353 | 51 | 1 |
260 | 3 | 191 | 68 | 15 | 130 | 30.9 | 0.299 | 34 | 0 |
261 | 3 | 141 | 0 | 0 | 0 | 30.0 | 0.761 | 27 | 1 |
262 | 4 | 95 | 70 | 32 | 0 | 32.1 | 0.612 | 24 | 0 |
263 | 3 | 142 | 80 | 15 | 0 | 32.4 | 0.200 | 63 | 0 |
264 | 4 | 123 | 62 | 0 | 0 | 32.0 | 0.226 | 35 | 1 |
265 | 5 | 96 | 74 | 18 | 67 | 33.6 | 0.997 | 43 | 0 |
266 | 0 | 138 | 0 | 0 | 0 | 36.3 | 0.933 | 25 | 1 |
267 | 2 | 128 | 64 | 42 | 0 | 40.0 | 1.101 | 24 | 0 |
268 | 0 | 102 | 52 | 0 | 0 | 25.1 | 0.078 | 21 | 0 |
269 | 2 | 146 | 0 | 0 | 0 | 27.5 | 0.240 | 28 | 1 |
270 | 10 | 101 | 86 | 37 | 0 | 45.6 | 1.136 | 38 | 1 |
271 | 2 | 108 | 62 | 32 | 56 | 25.2 | 0.128 | 21 | 0 |
272 | 3 | 122 | 78 | 0 | 0 | 23.0 | 0.254 | 40 | 0 |
273 | 1 | 71 | 78 | 50 | 45 | 33.2 | 0.422 | 21 | 0 |
274 | 13 | 106 | 70 | 0 | 0 | 34.2 | 0.251 | 52 | 0 |
275 | 2 | 100 | 70 | 52 | 57 | 40.5 | 0.677 | 25 | 0 |
276 | 7 | 106 | 60 | 24 | 0 | 26.5 | 0.296 | 29 | 1 |
277 | 0 | 104 | 64 | 23 | 116 | 27.8 | 0.454 | 23 | 0 |
278 | 5 | 114 | 74 | 0 | 0 | 24.9 | 0.744 | 57 | 0 |
279 | 2 | 108 | 62 | 10 | 278 | 25.3 | 0.881 | 22 | 0 |
280 | 0 | 146 | 70 | 0 | 0 | 37.9 | 0.334 | 28 | 1 |
281 | 10 | 129 | 76 | 28 | 122 | 35.9 | 0.280 | 39 | 0 |
282 | 7 | 133 | 88 | 15 | 155 | 32.4 | 0.262 | 37 | 0 |
283 | 7 | 161 | 86 | 0 | 0 | 30.4 | 0.165 | 47 | 1 |
284 | 2 | 108 | 80 | 0 | 0 | 27.0 | 0.259 | 52 | 1 |
285 | 7 | 136 | 74 | 26 | 135 | 26.0 | 0.647 | 51 | 0 |
286 | 5 | 155 | 84 | 44 | 545 | 38.7 | 0.619 | 34 | 0 |
287 | 1 | 119 | 86 | 39 | 220 | 45.6 | 0.808 | 29 | 1 |
288 | 4 | 96 | 56 | 17 | 49 | 20.8 | 0.340 | 26 | 0 |
289 | 5 | 108 | 72 | 43 | 75 | 36.1 | 0.263 | 33 | 0 |
290 | 0 | 78 | 88 | 29 | 40 | 36.9 | 0.434 | 21 | 0 |
291 | 0 | 107 | 62 | 30 | 74 | 36.6 | 0.757 | 25 | 1 |
292 | 2 | 128 | 78 | 37 | 182 | 43.3 | 1.224 | 31 | 1 |
293 | 1 | 128 | 48 | 45 | 194 | 40.5 | 0.613 | 24 | 1 |
294 | 0 | 161 | 50 | 0 | 0 | 21.9 | 0.254 | 65 | 0 |
295 | 6 | 151 | 62 | 31 | 120 | 35.5 | 0.692 | 28 | 0 |
296 | 2 | 146 | 70 | 38 | 360 | 28.0 | 0.337 | 29 | 1 |
297 | 0 | 126 | 84 | 29 | 215 | 30.7 | 0.520 | 24 | 0 |
298 | 14 | 100 | 78 | 25 | 184 | 36.6 | 0.412 | 46 | 1 |
299 | 8 | 112 | 72 | 0 | 0 | 23.6 | 0.840 | 58 | 0 |
300 | 0 | 167 | 0 | 0 | 0 | 32.3 | 0.839 | 30 | 1 |
301 | 2 | 144 | 58 | 33 | 135 | 31.6 | 0.422 | 25 | 1 |
302 | 5 | 77 | 82 | 41 | 42 | 35.8 | 0.156 | 35 | 0 |
303 | 5 | 115 | 98 | 0 | 0 | 52.9 | 0.209 | 28 | 1 |
304 | 3 | 150 | 76 | 0 | 0 | 21.0 | 0.207 | 37 | 0 |
305 | 2 | 120 | 76 | 37 | 105 | 39.7 | 0.215 | 29 | 0 |
306 | 10 | 161 | 68 | 23 | 132 | 25.5 | 0.326 | 47 | 1 |
307 | 0 | 137 | 68 | 14 | 148 | 24.8 | 0.143 | 21 | 0 |
308 | 0 | 128 | 68 | 19 | 180 | 30.5 | 1.391 | 25 | 1 |
309 | 2 | 124 | 68 | 28 | 205 | 32.9 | 0.875 | 30 | 1 |
310 | 6 | 80 | 66 | 30 | 0 | 26.2 | 0.313 | 41 | 0 |
311 | 0 | 106 | 70 | 37 | 148 | 39.4 | 0.605 | 22 | 0 |
312 | 2 | 155 | 74 | 17 | 96 | 26.6 | 0.433 | 27 | 1 |
313 | 3 | 113 | 50 | 10 | 85 | 29.5 | 0.626 | 25 | 0 |
314 | 7 | 109 | 80 | 31 | 0 | 35.9 | 1.127 | 43 | 1 |
315 | 2 | 112 | 68 | 22 | 94 | 34.1 | 0.315 | 26 | 0 |
316 | 3 | 99 | 80 | 11 | 64 | 19.3 | 0.284 | 30 | 0 |
317 | 3 | 182 | 74 | 0 | 0 | 30.5 | 0.345 | 29 | 1 |
318 | 3 | 115 | 66 | 39 | 140 | 38.1 | 0.150 | 28 | 0 |
319 | 6 | 194 | 78 | 0 | 0 | 23.5 | 0.129 | 59 | 1 |
320 | 4 | 129 | 60 | 12 | 231 | 27.5 | 0.527 | 31 | 0 |
321 | 3 | 112 | 74 | 30 | 0 | 31.6 | 0.197 | 25 | 1 |
322 | 0 | 124 | 70 | 20 | 0 | 27.4 | 0.254 | 36 | 1 |
323 | 13 | 152 | 90 | 33 | 29 | 26.8 | 0.731 | 43 | 1 |
324 | 2 | 112 | 75 | 32 | 0 | 35.7 | 0.148 | 21 | 0 |
325 | 1 | 157 | 72 | 21 | 168 | 25.6 | 0.123 | 24 | 0 |
326 | 1 | 122 | 64 | 32 | 156 | 35.1 | 0.692 | 30 | 1 |
327 | 10 | 179 | 70 | 0 | 0 | 35.1 | 0.200 | 37 | 0 |
328 | 2 | 102 | 86 | 36 | 120 | 45.5 | 0.127 | 23 | 1 |
329 | 6 | 105 | 70 | 32 | 68 | 30.8 | 0.122 | 37 | 0 |
330 | 8 | 118 | 72 | 19 | 0 | 23.1 | 1.476 | 46 | 0 |
331 | 2 | 87 | 58 | 16 | 52 | 32.7 | 0.166 | 25 | 0 |
332 | 1 | 180 | 0 | 0 | 0 | 43.3 | 0.282 | 41 | 1 |
333 | 12 | 106 | 80 | 0 | 0 | 23.6 | 0.137 | 44 | 0 |
334 | 1 | 95 | 60 | 18 | 58 | 23.9 | 0.260 | 22 | 0 |
335 | 0 | 165 | 76 | 43 | 255 | 47.9 | 0.259 | 26 | 0 |
336 | 0 | 117 | 0 | 0 | 0 | 33.8 | 0.932 | 44 | 0 |
337 | 5 | 115 | 76 | 0 | 0 | 31.2 | 0.343 | 44 | 1 |
338 | 9 | 152 | 78 | 34 | 171 | 34.2 | 0.893 | 33 | 1 |
339 | 7 | 178 | 84 | 0 | 0 | 39.9 | 0.331 | 41 | 1 |
340 | 1 | 130 | 70 | 13 | 105 | 25.9 | 0.472 | 22 | 0 |
341 | 1 | 95 | 74 | 21 | 73 | 25.9 | 0.673 | 36 | 0 |
342 | 1 | 0 | 68 | 35 | 0 | 32.0 | 0.389 | 22 | 0 |
343 | 5 | 122 | 86 | 0 | 0 | 34.7 | 0.290 | 33 | 0 |
344 | 8 | 95 | 72 | 0 | 0 | 36.8 | 0.485 | 57 | 0 |
345 | 8 | 126 | 88 | 36 | 108 | 38.5 | 0.349 | 49 | 0 |
346 | 1 | 139 | 46 | 19 | 83 | 28.7 | 0.654 | 22 | 0 |
347 | 3 | 116 | 0 | 0 | 0 | 23.5 | 0.187 | 23 | 0 |
348 | 3 | 99 | 62 | 19 | 74 | 21.8 | 0.279 | 26 | 0 |
349 | 5 | 0 | 80 | 32 | 0 | 41.0 | 0.346 | 37 | 1 |
350 | 4 | 92 | 80 | 0 | 0 | 42.2 | 0.237 | 29 | 0 |
351 | 4 | 137 | 84 | 0 | 0 | 31.2 | 0.252 | 30 | 0 |
352 | 3 | 61 | 82 | 28 | 0 | 34.4 | 0.243 | 46 | 0 |
353 | 1 | 90 | 62 | 12 | 43 | 27.2 | 0.580 | 24 | 0 |
354 | 3 | 90 | 78 | 0 | 0 | 42.7 | 0.559 | 21 | 0 |
355 | 9 | 165 | 88 | 0 | 0 | 30.4 | 0.302 | 49 | 1 |
356 | 1 | 125 | 50 | 40 | 167 | 33.3 | 0.962 | 28 | 1 |
357 | 13 | 129 | 0 | 30 | 0 | 39.9 | 0.569 | 44 | 1 |
358 | 12 | 88 | 74 | 40 | 54 | 35.3 | 0.378 | 48 | 0 |
359 | 1 | 196 | 76 | 36 | 249 | 36.5 | 0.875 | 29 | 1 |
360 | 5 | 189 | 64 | 33 | 325 | 31.2 | 0.583 | 29 | 1 |
361 | 5 | 158 | 70 | 0 | 0 | 29.8 | 0.207 | 63 | 0 |
362 | 5 | 103 | 108 | 37 | 0 | 39.2 | 0.305 | 65 | 0 |
363 | 4 | 146 | 78 | 0 | 0 | 38.5 | 0.520 | 67 | 1 |
364 | 4 | 147 | 74 | 25 | 293 | 34.9 | 0.385 | 30 | 0 |
365 | 5 | 99 | 54 | 28 | 83 | 34.0 | 0.499 | 30 | 0 |
366 | 6 | 124 | 72 | 0 | 0 | 27.6 | 0.368 | 29 | 1 |
367 | 0 | 101 | 64 | 17 | 0 | 21.0 | 0.252 | 21 | 0 |
368 | 3 | 81 | 86 | 16 | 66 | 27.5 | 0.306 | 22 | 0 |
369 | 1 | 133 | 102 | 28 | 140 | 32.8 | 0.234 | 45 | 1 |
370 | 3 | 173 | 82 | 48 | 465 | 38.4 | 2.137 | 25 | 1 |
371 | 0 | 118 | 64 | 23 | 89 | 0.0 | 1.731 | 21 | 0 |
372 | 0 | 84 | 64 | 22 | 66 | 35.8 | 0.545 | 21 | 0 |
373 | 2 | 105 | 58 | 40 | 94 | 34.9 | 0.225 | 25 | 0 |
374 | 2 | 122 | 52 | 43 | 158 | 36.2 | 0.816 | 28 | 0 |
375 | 12 | 140 | 82 | 43 | 325 | 39.2 | 0.528 | 58 | 1 |
376 | 0 | 98 | 82 | 15 | 84 | 25.2 | 0.299 | 22 | 0 |
377 | 1 | 87 | 60 | 37 | 75 | 37.2 | 0.509 | 22 | 0 |
378 | 4 | 156 | 75 | 0 | 0 | 48.3 | 0.238 | 32 | 1 |
379 | 0 | 93 | 100 | 39 | 72 | 43.4 | 1.021 | 35 | 0 |
380 | 1 | 107 | 72 | 30 | 82 | 30.8 | 0.821 | 24 | 0 |
381 | 0 | 105 | 68 | 22 | 0 | 20.0 | 0.236 | 22 | 0 |
382 | 1 | 109 | 60 | 8 | 182 | 25.4 | 0.947 | 21 | 0 |
383 | 1 | 90 | 62 | 18 | 59 | 25.1 | 1.268 | 25 | 0 |
384 | 1 | 125 | 70 | 24 | 110 | 24.3 | 0.221 | 25 | 0 |
385 | 1 | 119 | 54 | 13 | 50 | 22.3 | 0.205 | 24 | 0 |
386 | 5 | 116 | 74 | 29 | 0 | 32.3 | 0.660 | 35 | 1 |
387 | 8 | 105 | 100 | 36 | 0 | 43.3 | 0.239 | 45 | 1 |
388 | 5 | 144 | 82 | 26 | 285 | 32.0 | 0.452 | 58 | 1 |
389 | 3 | 100 | 68 | 23 | 81 | 31.6 | 0.949 | 28 | 0 |
390 | 1 | 100 | 66 | 29 | 196 | 32.0 | 0.444 | 42 | 0 |
391 | 5 | 166 | 76 | 0 | 0 | 45.7 | 0.340 | 27 | 1 |
392 | 1 | 131 | 64 | 14 | 415 | 23.7 | 0.389 | 21 | 0 |
393 | 4 | 116 | 72 | 12 | 87 | 22.1 | 0.463 | 37 | 0 |
394 | 4 | 158 | 78 | 0 | 0 | 32.9 | 0.803 | 31 | 1 |
395 | 2 | 127 | 58 | 24 | 275 | 27.7 | 1.600 | 25 | 0 |
396 | 3 | 96 | 56 | 34 | 115 | 24.7 | 0.944 | 39 | 0 |
397 | 0 | 131 | 66 | 40 | 0 | 34.3 | 0.196 | 22 | 1 |
398 | 3 | 82 | 70 | 0 | 0 | 21.1 | 0.389 | 25 | 0 |
399 | 3 | 193 | 70 | 31 | 0 | 34.9 | 0.241 | 25 | 1 |
400 | 4 | 95 | 64 | 0 | 0 | 32.0 | 0.161 | 31 | 1 |
401 | 6 | 137 | 61 | 0 | 0 | 24.2 | 0.151 | 55 | 0 |
402 | 5 | 136 | 84 | 41 | 88 | 35.0 | 0.286 | 35 | 1 |
403 | 9 | 72 | 78 | 25 | 0 | 31.6 | 0.280 | 38 | 0 |
404 | 5 | 168 | 64 | 0 | 0 | 32.9 | 0.135 | 41 | 1 |
405 | 2 | 123 | 48 | 32 | 165 | 42.1 | 0.520 | 26 | 0 |
406 | 4 | 115 | 72 | 0 | 0 | 28.9 | 0.376 | 46 | 1 |
407 | 0 | 101 | 62 | 0 | 0 | 21.9 | 0.336 | 25 | 0 |
408 | 8 | 197 | 74 | 0 | 0 | 25.9 | 1.191 | 39 | 1 |
409 | 1 | 172 | 68 | 49 | 579 | 42.4 | 0.702 | 28 | 1 |
410 | 6 | 102 | 90 | 39 | 0 | 35.7 | 0.674 | 28 | 0 |
411 | 1 | 112 | 72 | 30 | 176 | 34.4 | 0.528 | 25 | 0 |
412 | 1 | 143 | 84 | 23 | 310 | 42.4 | 1.076 | 22 | 0 |
413 | 1 | 143 | 74 | 22 | 61 | 26.2 | 0.256 | 21 | 0 |
414 | 0 | 138 | 60 | 35 | 167 | 34.6 | 0.534 | 21 | 1 |
415 | 3 | 173 | 84 | 33 | 474 | 35.7 | 0.258 | 22 | 1 |
416 | 1 | 97 | 68 | 21 | 0 | 27.2 | 1.095 | 22 | 0 |
417 | 4 | 144 | 82 | 32 | 0 | 38.5 | 0.554 | 37 | 1 |
418 | 1 | 83 | 68 | 0 | 0 | 18.2 | 0.624 | 27 | 0 |
419 | 3 | 129 | 64 | 29 | 115 | 26.4 | 0.219 | 28 | 1 |
420 | 1 | 119 | 88 | 41 | 170 | 45.3 | 0.507 | 26 | 0 |
421 | 2 | 94 | 68 | 18 | 76 | 26.0 | 0.561 | 21 | 0 |
422 | 0 | 102 | 64 | 46 | 78 | 40.6 | 0.496 | 21 | 0 |
423 | 2 | 115 | 64 | 22 | 0 | 30.8 | 0.421 | 21 | 0 |
424 | 8 | 151 | 78 | 32 | 210 | 42.9 | 0.516 | 36 | 1 |
425 | 4 | 184 | 78 | 39 | 277 | 37.0 | 0.264 | 31 | 1 |
426 | 0 | 94 | 0 | 0 | 0 | 0.0 | 0.256 | 25 | 0 |
427 | 1 | 181 | 64 | 30 | 180 | 34.1 | 0.328 | 38 | 1 |
428 | 0 | 135 | 94 | 46 | 145 | 40.6 | 0.284 | 26 | 0 |
429 | 1 | 95 | 82 | 25 | 180 | 35.0 | 0.233 | 43 | 1 |
430 | 2 | 99 | 0 | 0 | 0 | 22.2 | 0.108 | 23 | 0 |
431 | 3 | 89 | 74 | 16 | 85 | 30.4 | 0.551 | 38 | 0 |
432 | 1 | 80 | 74 | 11 | 60 | 30.0 | 0.527 | 22 | 0 |
433 | 2 | 139 | 75 | 0 | 0 | 25.6 | 0.167 | 29 | 0 |
434 | 1 | 90 | 68 | 8 | 0 | 24.5 | 1.138 | 36 | 0 |
435 | 0 | 141 | 0 | 0 | 0 | 42.4 | 0.205 | 29 | 1 |
436 | 12 | 140 | 85 | 33 | 0 | 37.4 | 0.244 | 41 | 0 |
437 | 5 | 147 | 75 | 0 | 0 | 29.9 | 0.434 | 28 | 0 |
438 | 1 | 97 | 70 | 15 | 0 | 18.2 | 0.147 | 21 | 0 |
439 | 6 | 107 | 88 | 0 | 0 | 36.8 | 0.727 | 31 | 0 |
440 | 0 | 189 | 104 | 25 | 0 | 34.3 | 0.435 | 41 | 1 |
441 | 2 | 83 | 66 | 23 | 50 | 32.2 | 0.497 | 22 | 0 |
442 | 4 | 117 | 64 | 27 | 120 | 33.2 | 0.230 | 24 | 0 |
443 | 8 | 108 | 70 | 0 | 0 | 30.5 | 0.955 | 33 | 1 |
444 | 4 | 117 | 62 | 12 | 0 | 29.7 | 0.380 | 30 | 1 |
445 | 0 | 180 | 78 | 63 | 14 | 59.4 | 2.420 | 25 | 1 |
446 | 1 | 100 | 72 | 12 | 70 | 25.3 | 0.658 | 28 | 0 |
447 | 0 | 95 | 80 | 45 | 92 | 36.5 | 0.330 | 26 | 0 |
448 | 0 | 104 | 64 | 37 | 64 | 33.6 | 0.510 | 22 | 1 |
449 | 0 | 120 | 74 | 18 | 63 | 30.5 | 0.285 | 26 | 0 |
450 | 1 | 82 | 64 | 13 | 95 | 21.2 | 0.415 | 23 | 0 |
451 | 2 | 134 | 70 | 0 | 0 | 28.9 | 0.542 | 23 | 1 |
452 | 0 | 91 | 68 | 32 | 210 | 39.9 | 0.381 | 25 | 0 |
453 | 2 | 119 | 0 | 0 | 0 | 19.6 | 0.832 | 72 | 0 |
454 | 2 | 100 | 54 | 28 | 105 | 37.8 | 0.498 | 24 | 0 |
455 | 14 | 175 | 62 | 30 | 0 | 33.6 | 0.212 | 38 | 1 |
456 | 1 | 135 | 54 | 0 | 0 | 26.7 | 0.687 | 62 | 0 |
457 | 5 | 86 | 68 | 28 | 71 | 30.2 | 0.364 | 24 | 0 |
458 | 10 | 148 | 84 | 48 | 237 | 37.6 | 1.001 | 51 | 1 |
459 | 9 | 134 | 74 | 33 | 60 | 25.9 | 0.460 | 81 | 0 |
460 | 9 | 120 | 72 | 22 | 56 | 20.8 | 0.733 | 48 | 0 |
461 | 1 | 71 | 62 | 0 | 0 | 21.8 | 0.416 | 26 | 0 |
462 | 8 | 74 | 70 | 40 | 49 | 35.3 | 0.705 | 39 | 0 |
463 | 5 | 88 | 78 | 30 | 0 | 27.6 | 0.258 | 37 | 0 |
464 | 10 | 115 | 98 | 0 | 0 | 24.0 | 1.022 | 34 | 0 |
465 | 0 | 124 | 56 | 13 | 105 | 21.8 | 0.452 | 21 | 0 |
466 | 0 | 74 | 52 | 10 | 36 | 27.8 | 0.269 | 22 | 0 |
467 | 0 | 97 | 64 | 36 | 100 | 36.8 | 0.600 | 25 | 0 |
468 | 8 | 120 | 0 | 0 | 0 | 30.0 | 0.183 | 38 | 1 |
469 | 6 | 154 | 78 | 41 | 140 | 46.1 | 0.571 | 27 | 0 |
470 | 1 | 144 | 82 | 40 | 0 | 41.3 | 0.607 | 28 | 0 |
471 | 0 | 137 | 70 | 38 | 0 | 33.2 | 0.170 | 22 | 0 |
472 | 0 | 119 | 66 | 27 | 0 | 38.8 | 0.259 | 22 | 0 |
473 | 7 | 136 | 90 | 0 | 0 | 29.9 | 0.210 | 50 | 0 |
474 | 4 | 114 | 64 | 0 | 0 | 28.9 | 0.126 | 24 | 0 |
475 | 0 | 137 | 84 | 27 | 0 | 27.3 | 0.231 | 59 | 0 |
476 | 2 | 105 | 80 | 45 | 191 | 33.7 | 0.711 | 29 | 1 |
477 | 7 | 114 | 76 | 17 | 110 | 23.8 | 0.466 | 31 | 0 |
478 | 8 | 126 | 74 | 38 | 75 | 25.9 | 0.162 | 39 | 0 |
479 | 4 | 132 | 86 | 31 | 0 | 28.0 | 0.419 | 63 | 0 |
480 | 3 | 158 | 70 | 30 | 328 | 35.5 | 0.344 | 35 | 1 |
481 | 0 | 123 | 88 | 37 | 0 | 35.2 | 0.197 | 29 | 0 |
482 | 4 | 85 | 58 | 22 | 49 | 27.8 | 0.306 | 28 | 0 |
483 | 0 | 84 | 82 | 31 | 125 | 38.2 | 0.233 | 23 | 0 |
484 | 0 | 145 | 0 | 0 | 0 | 44.2 | 0.630 | 31 | 1 |
485 | 0 | 135 | 68 | 42 | 250 | 42.3 | 0.365 | 24 | 1 |
486 | 1 | 139 | 62 | 41 | 480 | 40.7 | 0.536 | 21 | 0 |
487 | 0 | 173 | 78 | 32 | 265 | 46.5 | 1.159 | 58 | 0 |
488 | 4 | 99 | 72 | 17 | 0 | 25.6 | 0.294 | 28 | 0 |
489 | 8 | 194 | 80 | 0 | 0 | 26.1 | 0.551 | 67 | 0 |
490 | 2 | 83 | 65 | 28 | 66 | 36.8 | 0.629 | 24 | 0 |
491 | 2 | 89 | 90 | 30 | 0 | 33.5 | 0.292 | 42 | 0 |
492 | 4 | 99 | 68 | 38 | 0 | 32.8 | 0.145 | 33 | 0 |
493 | 4 | 125 | 70 | 18 | 122 | 28.9 | 1.144 | 45 | 1 |
494 | 3 | 80 | 0 | 0 | 0 | 0.0 | 0.174 | 22 | 0 |
495 | 6 | 166 | 74 | 0 | 0 | 26.6 | 0.304 | 66 | 0 |
496 | 5 | 110 | 68 | 0 | 0 | 26.0 | 0.292 | 30 | 0 |
497 | 2 | 81 | 72 | 15 | 76 | 30.1 | 0.547 | 25 | 0 |
498 | 7 | 195 | 70 | 33 | 145 | 25.1 | 0.163 | 55 | 1 |
499 | 6 | 154 | 74 | 32 | 193 | 29.3 | 0.839 | 39 | 0 |
500 | 2 | 117 | 90 | 19 | 71 | 25.2 | 0.313 | 21 | 0 |
501 | 3 | 84 | 72 | 32 | 0 | 37.2 | 0.267 | 28 | 0 |
502 | 6 | 0 | 68 | 41 | 0 | 39.0 | 0.727 | 41 | 1 |
503 | 7 | 94 | 64 | 25 | 79 | 33.3 | 0.738 | 41 | 0 |
504 | 3 | 96 | 78 | 39 | 0 | 37.3 | 0.238 | 40 | 0 |
505 | 10 | 75 | 82 | 0 | 0 | 33.3 | 0.263 | 38 | 0 |
506 | 0 | 180 | 90 | 26 | 90 | 36.5 | 0.314 | 35 | 1 |
507 | 1 | 130 | 60 | 23 | 170 | 28.6 | 0.692 | 21 | 0 |
508 | 2 | 84 | 50 | 23 | 76 | 30.4 | 0.968 | 21 | 0 |
509 | 8 | 120 | 78 | 0 | 0 | 25.0 | 0.409 | 64 | 0 |
510 | 12 | 84 | 72 | 31 | 0 | 29.7 | 0.297 | 46 | 1 |
511 | 0 | 139 | 62 | 17 | 210 | 22.1 | 0.207 | 21 | 0 |
512 | 9 | 91 | 68 | 0 | 0 | 24.2 | 0.200 | 58 | 0 |
513 | 2 | 91 | 62 | 0 | 0 | 27.3 | 0.525 | 22 | 0 |
514 | 3 | 99 | 54 | 19 | 86 | 25.6 | 0.154 | 24 | 0 |
515 | 3 | 163 | 70 | 18 | 105 | 31.6 | 0.268 | 28 | 1 |
516 | 9 | 145 | 88 | 34 | 165 | 30.3 | 0.771 | 53 | 1 |
517 | 7 | 125 | 86 | 0 | 0 | 37.6 | 0.304 | 51 | 0 |
518 | 13 | 76 | 60 | 0 | 0 | 32.8 | 0.180 | 41 | 0 |
519 | 6 | 129 | 90 | 7 | 326 | 19.6 | 0.582 | 60 | 0 |
520 | 2 | 68 | 70 | 32 | 66 | 25.0 | 0.187 | 25 | 0 |
521 | 3 | 124 | 80 | 33 | 130 | 33.2 | 0.305 | 26 | 0 |
522 | 6 | 114 | 0 | 0 | 0 | 0.0 | 0.189 | 26 | 0 |
523 | 9 | 130 | 70 | 0 | 0 | 34.2 | 0.652 | 45 | 1 |
524 | 3 | 125 | 58 | 0 | 0 | 31.6 | 0.151 | 24 | 0 |
525 | 3 | 87 | 60 | 18 | 0 | 21.8 | 0.444 | 21 | 0 |
526 | 1 | 97 | 64 | 19 | 82 | 18.2 | 0.299 | 21 | 0 |
527 | 3 | 116 | 74 | 15 | 105 | 26.3 | 0.107 | 24 | 0 |
528 | 0 | 117 | 66 | 31 | 188 | 30.8 | 0.493 | 22 | 0 |
529 | 0 | 111 | 65 | 0 | 0 | 24.6 | 0.660 | 31 | 0 |
530 | 2 | 122 | 60 | 18 | 106 | 29.8 | 0.717 | 22 | 0 |
531 | 0 | 107 | 76 | 0 | 0 | 45.3 | 0.686 | 24 | 0 |
532 | 1 | 86 | 66 | 52 | 65 | 41.3 | 0.917 | 29 | 0 |
533 | 6 | 91 | 0 | 0 | 0 | 29.8 | 0.501 | 31 | 0 |
534 | 1 | 77 | 56 | 30 | 56 | 33.3 | 1.251 | 24 | 0 |
535 | 4 | 132 | 0 | 0 | 0 | 32.9 | 0.302 | 23 | 1 |
536 | 0 | 105 | 90 | 0 | 0 | 29.6 | 0.197 | 46 | 0 |
537 | 0 | 57 | 60 | 0 | 0 | 21.7 | 0.735 | 67 | 0 |
538 | 0 | 127 | 80 | 37 | 210 | 36.3 | 0.804 | 23 | 0 |
539 | 3 | 129 | 92 | 49 | 155 | 36.4 | 0.968 | 32 | 1 |
540 | 8 | 100 | 74 | 40 | 215 | 39.4 | 0.661 | 43 | 1 |
541 | 3 | 128 | 72 | 25 | 190 | 32.4 | 0.549 | 27 | 1 |
542 | 10 | 90 | 85 | 32 | 0 | 34.9 | 0.825 | 56 | 1 |
543 | 4 | 84 | 90 | 23 | 56 | 39.5 | 0.159 | 25 | 0 |
544 | 1 | 88 | 78 | 29 | 76 | 32.0 | 0.365 | 29 | 0 |
545 | 8 | 186 | 90 | 35 | 225 | 34.5 | 0.423 | 37 | 1 |
546 | 5 | 187 | 76 | 27 | 207 | 43.6 | 1.034 | 53 | 1 |
547 | 4 | 131 | 68 | 21 | 166 | 33.1 | 0.160 | 28 | 0 |
548 | 1 | 164 | 82 | 43 | 67 | 32.8 | 0.341 | 50 | 0 |
549 | 4 | 189 | 110 | 31 | 0 | 28.5 | 0.680 | 37 | 0 |
550 | 1 | 116 | 70 | 28 | 0 | 27.4 | 0.204 | 21 | 0 |
551 | 3 | 84 | 68 | 30 | 106 | 31.9 | 0.591 | 25 | 0 |
552 | 6 | 114 | 88 | 0 | 0 | 27.8 | 0.247 | 66 | 0 |
553 | 1 | 88 | 62 | 24 | 44 | 29.9 | 0.422 | 23 | 0 |
554 | 1 | 84 | 64 | 23 | 115 | 36.9 | 0.471 | 28 | 0 |
555 | 7 | 124 | 70 | 33 | 215 | 25.5 | 0.161 | 37 | 0 |
556 | 1 | 97 | 70 | 40 | 0 | 38.1 | 0.218 | 30 | 0 |
557 | 8 | 110 | 76 | 0 | 0 | 27.8 | 0.237 | 58 | 0 |
558 | 11 | 103 | 68 | 40 | 0 | 46.2 | 0.126 | 42 | 0 |
559 | 11 | 85 | 74 | 0 | 0 | 30.1 | 0.300 | 35 | 0 |
560 | 6 | 125 | 76 | 0 | 0 | 33.8 | 0.121 | 54 | 1 |
561 | 0 | 198 | 66 | 32 | 274 | 41.3 | 0.502 | 28 | 1 |
562 | 1 | 87 | 68 | 34 | 77 | 37.6 | 0.401 | 24 | 0 |
563 | 6 | 99 | 60 | 19 | 54 | 26.9 | 0.497 | 32 | 0 |
564 | 0 | 91 | 80 | 0 | 0 | 32.4 | 0.601 | 27 | 0 |
565 | 2 | 95 | 54 | 14 | 88 | 26.1 | 0.748 | 22 | 0 |
566 | 1 | 99 | 72 | 30 | 18 | 38.6 | 0.412 | 21 | 0 |
567 | 6 | 92 | 62 | 32 | 126 | 32.0 | 0.085 | 46 | 0 |
568 | 4 | 154 | 72 | 29 | 126 | 31.3 | 0.338 | 37 | 0 |
569 | 0 | 121 | 66 | 30 | 165 | 34.3 | 0.203 | 33 | 1 |
570 | 3 | 78 | 70 | 0 | 0 | 32.5 | 0.270 | 39 | 0 |
571 | 2 | 130 | 96 | 0 | 0 | 22.6 | 0.268 | 21 | 0 |
572 | 3 | 111 | 58 | 31 | 44 | 29.5 | 0.430 | 22 | 0 |
573 | 2 | 98 | 60 | 17 | 120 | 34.7 | 0.198 | 22 | 0 |
574 | 1 | 143 | 86 | 30 | 330 | 30.1 | 0.892 | 23 | 0 |
575 | 1 | 119 | 44 | 47 | 63 | 35.5 | 0.280 | 25 | 0 |
576 | 6 | 108 | 44 | 20 | 130 | 24.0 | 0.813 | 35 | 0 |
577 | 2 | 118 | 80 | 0 | 0 | 42.9 | 0.693 | 21 | 1 |
578 | 10 | 133 | 68 | 0 | 0 | 27.0 | 0.245 | 36 | 0 |
579 | 2 | 197 | 70 | 99 | 0 | 34.7 | 0.575 | 62 | 1 |
580 | 0 | 151 | 90 | 46 | 0 | 42.1 | 0.371 | 21 | 1 |
581 | 6 | 109 | 60 | 27 | 0 | 25.0 | 0.206 | 27 | 0 |
582 | 12 | 121 | 78 | 17 | 0 | 26.5 | 0.259 | 62 | 0 |
583 | 8 | 100 | 76 | 0 | 0 | 38.7 | 0.190 | 42 | 0 |
584 | 8 | 124 | 76 | 24 | 600 | 28.7 | 0.687 | 52 | 1 |
585 | 1 | 93 | 56 | 11 | 0 | 22.5 | 0.417 | 22 | 0 |
586 | 8 | 143 | 66 | 0 | 0 | 34.9 | 0.129 | 41 | 1 |
587 | 6 | 103 | 66 | 0 | 0 | 24.3 | 0.249 | 29 | 0 |
588 | 3 | 176 | 86 | 27 | 156 | 33.3 | 1.154 | 52 | 1 |
589 | 0 | 73 | 0 | 0 | 0 | 21.1 | 0.342 | 25 | 0 |
590 | 11 | 111 | 84 | 40 | 0 | 46.8 | 0.925 | 45 | 1 |
591 | 2 | 112 | 78 | 50 | 140 | 39.4 | 0.175 | 24 | 0 |
592 | 3 | 132 | 80 | 0 | 0 | 34.4 | 0.402 | 44 | 1 |
593 | 2 | 82 | 52 | 22 | 115 | 28.5 | 1.699 | 25 | 0 |
594 | 6 | 123 | 72 | 45 | 230 | 33.6 | 0.733 | 34 | 0 |
595 | 0 | 188 | 82 | 14 | 185 | 32.0 | 0.682 | 22 | 1 |
596 | 0 | 67 | 76 | 0 | 0 | 45.3 | 0.194 | 46 | 0 |
597 | 1 | 89 | 24 | 19 | 25 | 27.8 | 0.559 | 21 | 0 |
598 | 1 | 173 | 74 | 0 | 0 | 36.8 | 0.088 | 38 | 1 |
599 | 1 | 109 | 38 | 18 | 120 | 23.1 | 0.407 | 26 | 0 |
600 | 1 | 108 | 88 | 19 | 0 | 27.1 | 0.400 | 24 | 0 |
601 | 6 | 96 | 0 | 0 | 0 | 23.7 | 0.190 | 28 | 0 |
602 | 1 | 124 | 74 | 36 | 0 | 27.8 | 0.100 | 30 | 0 |
603 | 7 | 150 | 78 | 29 | 126 | 35.2 | 0.692 | 54 | 1 |
604 | 4 | 183 | 0 | 0 | 0 | 28.4 | 0.212 | 36 | 1 |
605 | 1 | 124 | 60 | 32 | 0 | 35.8 | 0.514 | 21 | 0 |
606 | 1 | 181 | 78 | 42 | 293 | 40.0 | 1.258 | 22 | 1 |
607 | 1 | 92 | 62 | 25 | 41 | 19.5 | 0.482 | 25 | 0 |
608 | 0 | 152 | 82 | 39 | 272 | 41.5 | 0.270 | 27 | 0 |
609 | 1 | 111 | 62 | 13 | 182 | 24.0 | 0.138 | 23 | 0 |
610 | 3 | 106 | 54 | 21 | 158 | 30.9 | 0.292 | 24 | 0 |
611 | 3 | 174 | 58 | 22 | 194 | 32.9 | 0.593 | 36 | 1 |
612 | 7 | 168 | 88 | 42 | 321 | 38.2 | 0.787 | 40 | 1 |
613 | 6 | 105 | 80 | 28 | 0 | 32.5 | 0.878 | 26 | 0 |
614 | 11 | 138 | 74 | 26 | 144 | 36.1 | 0.557 | 50 | 1 |
615 | 3 | 106 | 72 | 0 | 0 | 25.8 | 0.207 | 27 | 0 |
616 | 6 | 117 | 96 | 0 | 0 | 28.7 | 0.157 | 30 | 0 |
617 | 2 | 68 | 62 | 13 | 15 | 20.1 | 0.257 | 23 | 0 |
618 | 9 | 112 | 82 | 24 | 0 | 28.2 | 1.282 | 50 | 1 |
619 | 0 | 119 | 0 | 0 | 0 | 32.4 | 0.141 | 24 | 1 |
620 | 2 | 112 | 86 | 42 | 160 | 38.4 | 0.246 | 28 | 0 |
621 | 2 | 92 | 76 | 20 | 0 | 24.2 | 1.698 | 28 | 0 |
622 | 6 | 183 | 94 | 0 | 0 | 40.8 | 1.461 | 45 | 0 |
623 | 0 | 94 | 70 | 27 | 115 | 43.5 | 0.347 | 21 | 0 |
624 | 2 | 108 | 64 | 0 | 0 | 30.8 | 0.158 | 21 | 0 |
625 | 4 | 90 | 88 | 47 | 54 | 37.7 | 0.362 | 29 | 0 |
626 | 0 | 125 | 68 | 0 | 0 | 24.7 | 0.206 | 21 | 0 |
627 | 0 | 132 | 78 | 0 | 0 | 32.4 | 0.393 | 21 | 0 |
628 | 5 | 128 | 80 | 0 | 0 | 34.6 | 0.144 | 45 | 0 |
629 | 4 | 94 | 65 | 22 | 0 | 24.7 | 0.148 | 21 | 0 |
630 | 7 | 114 | 64 | 0 | 0 | 27.4 | 0.732 | 34 | 1 |
631 | 0 | 102 | 78 | 40 | 90 | 34.5 | 0.238 | 24 | 0 |
632 | 2 | 111 | 60 | 0 | 0 | 26.2 | 0.343 | 23 | 0 |
633 | 1 | 128 | 82 | 17 | 183 | 27.5 | 0.115 | 22 | 0 |
634 | 10 | 92 | 62 | 0 | 0 | 25.9 | 0.167 | 31 | 0 |
635 | 13 | 104 | 72 | 0 | 0 | 31.2 | 0.465 | 38 | 1 |
636 | 5 | 104 | 74 | 0 | 0 | 28.8 | 0.153 | 48 | 0 |
637 | 2 | 94 | 76 | 18 | 66 | 31.6 | 0.649 | 23 | 0 |
638 | 7 | 97 | 76 | 32 | 91 | 40.9 | 0.871 | 32 | 1 |
639 | 1 | 100 | 74 | 12 | 46 | 19.5 | 0.149 | 28 | 0 |
640 | 0 | 102 | 86 | 17 | 105 | 29.3 | 0.695 | 27 | 0 |
641 | 4 | 128 | 70 | 0 | 0 | 34.3 | 0.303 | 24 | 0 |
642 | 6 | 147 | 80 | 0 | 0 | 29.5 | 0.178 | 50 | 1 |
643 | 4 | 90 | 0 | 0 | 0 | 28.0 | 0.610 | 31 | 0 |
644 | 3 | 103 | 72 | 30 | 152 | 27.6 | 0.730 | 27 | 0 |
645 | 2 | 157 | 74 | 35 | 440 | 39.4 | 0.134 | 30 | 0 |
646 | 1 | 167 | 74 | 17 | 144 | 23.4 | 0.447 | 33 | 1 |
647 | 0 | 179 | 50 | 36 | 159 | 37.8 | 0.455 | 22 | 1 |
648 | 11 | 136 | 84 | 35 | 130 | 28.3 | 0.260 | 42 | 1 |
649 | 0 | 107 | 60 | 25 | 0 | 26.4 | 0.133 | 23 | 0 |
650 | 1 | 91 | 54 | 25 | 100 | 25.2 | 0.234 | 23 | 0 |
651 | 1 | 117 | 60 | 23 | 106 | 33.8 | 0.466 | 27 | 0 |
652 | 5 | 123 | 74 | 40 | 77 | 34.1 | 0.269 | 28 | 0 |
653 | 2 | 120 | 54 | 0 | 0 | 26.8 | 0.455 | 27 | 0 |
654 | 1 | 106 | 70 | 28 | 135 | 34.2 | 0.142 | 22 | 0 |
655 | 2 | 155 | 52 | 27 | 540 | 38.7 | 0.240 | 25 | 1 |
656 | 2 | 101 | 58 | 35 | 90 | 21.8 | 0.155 | 22 | 0 |
657 | 1 | 120 | 80 | 48 | 200 | 38.9 | 1.162 | 41 | 0 |
658 | 11 | 127 | 106 | 0 | 0 | 39.0 | 0.190 | 51 | 0 |
659 | 3 | 80 | 82 | 31 | 70 | 34.2 | 1.292 | 27 | 1 |
660 | 10 | 162 | 84 | 0 | 0 | 27.7 | 0.182 | 54 | 0 |
661 | 1 | 199 | 76 | 43 | 0 | 42.9 | 1.394 | 22 | 1 |
662 | 8 | 167 | 106 | 46 | 231 | 37.6 | 0.165 | 43 | 1 |
663 | 9 | 145 | 80 | 46 | 130 | 37.9 | 0.637 | 40 | 1 |
664 | 6 | 115 | 60 | 39 | 0 | 33.7 | 0.245 | 40 | 1 |
665 | 1 | 112 | 80 | 45 | 132 | 34.8 | 0.217 | 24 | 0 |
666 | 4 | 145 | 82 | 18 | 0 | 32.5 | 0.235 | 70 | 1 |
667 | 10 | 111 | 70 | 27 | 0 | 27.5 | 0.141 | 40 | 1 |
668 | 6 | 98 | 58 | 33 | 190 | 34.0 | 0.430 | 43 | 0 |
669 | 9 | 154 | 78 | 30 | 100 | 30.9 | 0.164 | 45 | 0 |
670 | 6 | 165 | 68 | 26 | 168 | 33.6 | 0.631 | 49 | 0 |
671 | 1 | 99 | 58 | 10 | 0 | 25.4 | 0.551 | 21 | 0 |
672 | 10 | 68 | 106 | 23 | 49 | 35.5 | 0.285 | 47 | 0 |
673 | 3 | 123 | 100 | 35 | 240 | 57.3 | 0.880 | 22 | 0 |
674 | 8 | 91 | 82 | 0 | 0 | 35.6 | 0.587 | 68 | 0 |
675 | 6 | 195 | 70 | 0 | 0 | 30.9 | 0.328 | 31 | 1 |
676 | 9 | 156 | 86 | 0 | 0 | 24.8 | 0.230 | 53 | 1 |
677 | 0 | 93 | 60 | 0 | 0 | 35.3 | 0.263 | 25 | 0 |
678 | 3 | 121 | 52 | 0 | 0 | 36.0 | 0.127 | 25 | 1 |
679 | 2 | 101 | 58 | 17 | 265 | 24.2 | 0.614 | 23 | 0 |
680 | 2 | 56 | 56 | 28 | 45 | 24.2 | 0.332 | 22 | 0 |
681 | 0 | 162 | 76 | 36 | 0 | 49.6 | 0.364 | 26 | 1 |
682 | 0 | 95 | 64 | 39 | 105 | 44.6 | 0.366 | 22 | 0 |
683 | 4 | 125 | 80 | 0 | 0 | 32.3 | 0.536 | 27 | 1 |
684 | 5 | 136 | 82 | 0 | 0 | 0.0 | 0.640 | 69 | 0 |
685 | 2 | 129 | 74 | 26 | 205 | 33.2 | 0.591 | 25 | 0 |
686 | 3 | 130 | 64 | 0 | 0 | 23.1 | 0.314 | 22 | 0 |
687 | 1 | 107 | 50 | 19 | 0 | 28.3 | 0.181 | 29 | 0 |
688 | 1 | 140 | 74 | 26 | 180 | 24.1 | 0.828 | 23 | 0 |
689 | 1 | 144 | 82 | 46 | 180 | 46.1 | 0.335 | 46 | 1 |
690 | 8 | 107 | 80 | 0 | 0 | 24.6 | 0.856 | 34 | 0 |
691 | 13 | 158 | 114 | 0 | 0 | 42.3 | 0.257 | 44 | 1 |
692 | 2 | 121 | 70 | 32 | 95 | 39.1 | 0.886 | 23 | 0 |
693 | 7 | 129 | 68 | 49 | 125 | 38.5 | 0.439 | 43 | 1 |
694 | 2 | 90 | 60 | 0 | 0 | 23.5 | 0.191 | 25 | 0 |
695 | 7 | 142 | 90 | 24 | 480 | 30.4 | 0.128 | 43 | 1 |
696 | 3 | 169 | 74 | 19 | 125 | 29.9 | 0.268 | 31 | 1 |
697 | 0 | 99 | 0 | 0 | 0 | 25.0 | 0.253 | 22 | 0 |
698 | 4 | 127 | 88 | 11 | 155 | 34.5 | 0.598 | 28 | 0 |
699 | 4 | 118 | 70 | 0 | 0 | 44.5 | 0.904 | 26 | 0 |
700 | 2 | 122 | 76 | 27 | 200 | 35.9 | 0.483 | 26 | 0 |
701 | 6 | 125 | 78 | 31 | 0 | 27.6 | 0.565 | 49 | 1 |
702 | 1 | 168 | 88 | 29 | 0 | 35.0 | 0.905 | 52 | 1 |
703 | 2 | 129 | 0 | 0 | 0 | 38.5 | 0.304 | 41 | 0 |
704 | 4 | 110 | 76 | 20 | 100 | 28.4 | 0.118 | 27 | 0 |
705 | 6 | 80 | 80 | 36 | 0 | 39.8 | 0.177 | 28 | 0 |
706 | 10 | 115 | 0 | 0 | 0 | 0.0 | 0.261 | 30 | 1 |
707 | 2 | 127 | 46 | 21 | 335 | 34.4 | 0.176 | 22 | 0 |
708 | 9 | 164 | 78 | 0 | 0 | 32.8 | 0.148 | 45 | 1 |
709 | 2 | 93 | 64 | 32 | 160 | 38.0 | 0.674 | 23 | 1 |
710 | 3 | 158 | 64 | 13 | 387 | 31.2 | 0.295 | 24 | 0 |
711 | 5 | 126 | 78 | 27 | 22 | 29.6 | 0.439 | 40 | 0 |
712 | 10 | 129 | 62 | 36 | 0 | 41.2 | 0.441 | 38 | 1 |
713 | 0 | 134 | 58 | 20 | 291 | 26.4 | 0.352 | 21 | 0 |
714 | 3 | 102 | 74 | 0 | 0 | 29.5 | 0.121 | 32 | 0 |
715 | 7 | 187 | 50 | 33 | 392 | 33.9 | 0.826 | 34 | 1 |
716 | 3 | 173 | 78 | 39 | 185 | 33.8 | 0.970 | 31 | 1 |
717 | 10 | 94 | 72 | 18 | 0 | 23.1 | 0.595 | 56 | 0 |
718 | 1 | 108 | 60 | 46 | 178 | 35.5 | 0.415 | 24 | 0 |
719 | 5 | 97 | 76 | 27 | 0 | 35.6 | 0.378 | 52 | 1 |
720 | 4 | 83 | 86 | 19 | 0 | 29.3 | 0.317 | 34 | 0 |
721 | 1 | 114 | 66 | 36 | 200 | 38.1 | 0.289 | 21 | 0 |
722 | 1 | 149 | 68 | 29 | 127 | 29.3 | 0.349 | 42 | 1 |
723 | 5 | 117 | 86 | 30 | 105 | 39.1 | 0.251 | 42 | 0 |
724 | 1 | 111 | 94 | 0 | 0 | 32.8 | 0.265 | 45 | 0 |
725 | 4 | 112 | 78 | 40 | 0 | 39.4 | 0.236 | 38 | 0 |
726 | 1 | 116 | 78 | 29 | 180 | 36.1 | 0.496 | 25 | 0 |
727 | 0 | 141 | 84 | 26 | 0 | 32.4 | 0.433 | 22 | 0 |
728 | 2 | 175 | 88 | 0 | 0 | 22.9 | 0.326 | 22 | 0 |
729 | 2 | 92 | 52 | 0 | 0 | 30.1 | 0.141 | 22 | 0 |
730 | 3 | 130 | 78 | 23 | 79 | 28.4 | 0.323 | 34 | 1 |
731 | 8 | 120 | 86 | 0 | 0 | 28.4 | 0.259 | 22 | 1 |
732 | 2 | 174 | 88 | 37 | 120 | 44.5 | 0.646 | 24 | 1 |
733 | 2 | 106 | 56 | 27 | 165 | 29.0 | 0.426 | 22 | 0 |
734 | 2 | 105 | 75 | 0 | 0 | 23.3 | 0.560 | 53 | 0 |
735 | 4 | 95 | 60 | 32 | 0 | 35.4 | 0.284 | 28 | 0 |
736 | 0 | 126 | 86 | 27 | 120 | 27.4 | 0.515 | 21 | 0 |
737 | 8 | 65 | 72 | 23 | 0 | 32.0 | 0.600 | 42 | 0 |
738 | 2 | 99 | 60 | 17 | 160 | 36.6 | 0.453 | 21 | 0 |
739 | 1 | 102 | 74 | 0 | 0 | 39.5 | 0.293 | 42 | 1 |
740 | 11 | 120 | 80 | 37 | 150 | 42.3 | 0.785 | 48 | 1 |
741 | 3 | 102 | 44 | 20 | 94 | 30.8 | 0.400 | 26 | 0 |
742 | 1 | 109 | 58 | 18 | 116 | 28.5 | 0.219 | 22 | 0 |
743 | 9 | 140 | 94 | 0 | 0 | 32.7 | 0.734 | 45 | 1 |
744 | 13 | 153 | 88 | 37 | 140 | 40.6 | 1.174 | 39 | 0 |
745 | 12 | 100 | 84 | 33 | 105 | 30.0 | 0.488 | 46 | 0 |
746 | 1 | 147 | 94 | 41 | 0 | 49.3 | 0.358 | 27 | 1 |
747 | 1 | 81 | 74 | 41 | 57 | 46.3 | 1.096 | 32 | 0 |
748 | 3 | 187 | 70 | 22 | 200 | 36.4 | 0.408 | 36 | 1 |
749 | 6 | 162 | 62 | 0 | 0 | 24.3 | 0.178 | 50 | 1 |
750 | 4 | 136 | 70 | 0 | 0 | 31.2 | 1.182 | 22 | 1 |
751 | 1 | 121 | 78 | 39 | 74 | 39.0 | 0.261 | 28 | 0 |
752 | 3 | 108 | 62 | 24 | 0 | 26.0 | 0.223 | 25 | 0 |
753 | 0 | 181 | 88 | 44 | 510 | 43.3 | 0.222 | 26 | 1 |
754 | 8 | 154 | 78 | 32 | 0 | 32.4 | 0.443 | 45 | 1 |
755 | 1 | 128 | 88 | 39 | 110 | 36.5 | 1.057 | 37 | 1 |
756 | 7 | 137 | 90 | 41 | 0 | 32.0 | 0.391 | 39 | 0 |
757 | 0 | 123 | 72 | 0 | 0 | 36.3 | 0.258 | 52 | 1 |
758 | 1 | 106 | 76 | 0 | 0 | 37.5 | 0.197 | 26 | 0 |
759 | 6 | 190 | 92 | 0 | 0 | 35.5 | 0.278 | 66 | 1 |
760 | 2 | 88 | 58 | 26 | 16 | 28.4 | 0.766 | 22 | 0 |
761 | 9 | 170 | 74 | 31 | 0 | 44.0 | 0.403 | 43 | 1 |
762 | 9 | 89 | 62 | 0 | 0 | 22.5 | 0.142 | 33 | 0 |
763 | 10 | 101 | 76 | 48 | 180 | 32.9 | 0.171 | 63 | 0 |
764 | 2 | 122 | 70 | 27 | 0 | 36.8 | 0.340 | 27 | 0 |
765 | 5 | 121 | 72 | 23 | 112 | 26.2 | 0.245 | 30 | 0 |
766 | 1 | 126 | 60 | 0 | 0 | 30.1 | 0.349 | 47 | 1 |
767 | 1 | 93 | 70 | 31 | 0 | 30.4 | 0.315 | 23 | 0 |
# Preprocess the raw diabetes dataset: praproses() (defined above) NaNs-out the
# zero-coded missing values in Glucose/BloodPressure/SkinThickness/Insulin/BMI,
# imputes them with a per-Outcome-class KNNImputer (k=3), then rounds columns
# back to their original precision.
# NOTE(review): `raw_data` is defined in an earlier cell not visible here —
# presumably the Pima Indians Diabetes CSV loaded via pandas; confirm upstream.
datasets_imputed = praproses(raw_data)
# Bare expression: Jupyter displays the resulting DataFrame (table below).
datasets_imputed
Pregnancies | Glucose | BloodPressure | SkinThickness | Insulin | BMI | DiabetesPedigreeFunction | Age | Outcome | |
---|---|---|---|---|---|---|---|---|---|
0 | 6.0 | 148.0 | 72.0 | 35.0 | 134.0 | 33.6 | 0.627 | 50.0 | 1.0 |
1 | 1.0 | 85.0 | 66.0 | 29.0 | 67.0 | 26.6 | 0.351 | 31.0 | 0.0 |
2 | 8.0 | 183.0 | 64.0 | 30.0 | 208.0 | 23.3 | 0.672 | 32.0 | 1.0 |
3 | 1.0 | 89.0 | 66.0 | 23.0 | 94.0 | 28.1 | 0.167 | 21.0 | 0.0 |
4 | 0.0 | 137.0 | 40.0 | 35.0 | 168.0 | 43.1 | 2.288 | 33.0 | 1.0 |
5 | 5.0 | 116.0 | 74.0 | 18.0 | 109.0 | 25.6 | 0.201 | 30.0 | 0.0 |
6 | 3.0 | 78.0 | 50.0 | 32.0 | 88.0 | 31.0 | 0.248 | 26.0 | 1.0 |
7 | 10.0 | 115.0 | 75.0 | 35.0 | 118.0 | 35.3 | 0.134 | 29.0 | 0.0 |
8 | 2.0 | 197.0 | 70.0 | 45.0 | 543.0 | 30.5 | 0.158 | 53.0 | 1.0 |
9 | 8.0 | 125.0 | 96.0 | 32.0 | 145.0 | 38.7 | 0.232 | 54.0 | 1.0 |
10 | 4.0 | 110.0 | 92.0 | 31.0 | 136.0 | 37.6 | 0.191 | 30.0 | 0.0 |
11 | 10.0 | 168.0 | 74.0 | 37.0 | 159.0 | 38.0 | 0.537 | 34.0 | 1.0 |
12 | 10.0 | 139.0 | 80.0 | 20.0 | 190.0 | 27.1 | 1.441 | 57.0 | 0.0 |
13 | 1.0 | 189.0 | 60.0 | 23.0 | 846.0 | 30.1 | 0.398 | 59.0 | 1.0 |
14 | 5.0 | 166.0 | 72.0 | 19.0 | 175.0 | 25.8 | 0.587 | 51.0 | 1.0 |
15 | 7.0 | 100.0 | 71.0 | 30.0 | 132.0 | 30.0 | 0.484 | 32.0 | 1.0 |
16 | 0.0 | 118.0 | 84.0 | 47.0 | 230.0 | 45.8 | 0.551 | 31.0 | 1.0 |
17 | 7.0 | 107.0 | 74.0 | 38.0 | 154.0 | 29.6 | 0.254 | 31.0 | 1.0 |
18 | 1.0 | 103.0 | 30.0 | 38.0 | 83.0 | 43.3 | 0.183 | 33.0 | 0.0 |
19 | 1.0 | 115.0 | 70.0 | 30.0 | 96.0 | 34.6 | 0.529 | 32.0 | 1.0 |
20 | 3.0 | 126.0 | 88.0 | 41.0 | 235.0 | 39.3 | 0.704 | 27.0 | 0.0 |
21 | 8.0 | 99.0 | 84.0 | 30.0 | 92.0 | 35.4 | 0.388 | 50.0 | 0.0 |
22 | 7.0 | 196.0 | 90.0 | 33.0 | 169.0 | 39.8 | 0.451 | 41.0 | 1.0 |
23 | 9.0 | 119.0 | 80.0 | 35.0 | 128.0 | 29.0 | 0.263 | 29.0 | 1.0 |
24 | 11.0 | 143.0 | 94.0 | 33.0 | 146.0 | 36.6 | 0.254 | 51.0 | 1.0 |
25 | 10.0 | 125.0 | 70.0 | 26.0 | 115.0 | 31.1 | 0.205 | 41.0 | 1.0 |
26 | 7.0 | 147.0 | 76.0 | 40.0 | 204.0 | 39.4 | 0.257 | 43.0 | 1.0 |
27 | 1.0 | 97.0 | 66.0 | 15.0 | 140.0 | 23.2 | 0.487 | 22.0 | 0.0 |
28 | 13.0 | 145.0 | 82.0 | 19.0 | 110.0 | 22.2 | 0.245 | 57.0 | 0.0 |
29 | 5.0 | 117.0 | 92.0 | 26.0 | 114.0 | 34.1 | 0.337 | 38.0 | 0.0 |
30 | 5.0 | 109.0 | 75.0 | 26.0 | 133.0 | 36.0 | 0.546 | 60.0 | 0.0 |
31 | 3.0 | 158.0 | 76.0 | 36.0 | 245.0 | 31.6 | 0.851 | 28.0 | 1.0 |
32 | 3.0 | 88.0 | 58.0 | 11.0 | 54.0 | 24.8 | 0.267 | 22.0 | 0.0 |
33 | 6.0 | 92.0 | 92.0 | 16.0 | 68.0 | 19.9 | 0.188 | 28.0 | 0.0 |
34 | 10.0 | 122.0 | 78.0 | 31.0 | 104.0 | 27.6 | 0.512 | 45.0 | 0.0 |
35 | 4.0 | 103.0 | 60.0 | 33.0 | 192.0 | 24.0 | 0.966 | 33.0 | 0.0 |
36 | 11.0 | 138.0 | 76.0 | 29.0 | 214.0 | 33.2 | 0.420 | 35.0 | 0.0 |
37 | 9.0 | 102.0 | 76.0 | 37.0 | 191.0 | 32.9 | 0.665 | 46.0 | 1.0 |
38 | 2.0 | 90.0 | 68.0 | 42.0 | 105.0 | 38.2 | 0.503 | 27.0 | 1.0 |
39 | 4.0 | 111.0 | 72.0 | 47.0 | 207.0 | 37.1 | 1.390 | 56.0 | 1.0 |
40 | 3.0 | 180.0 | 64.0 | 25.0 | 70.0 | 34.0 | 0.271 | 26.0 | 0.0 |
41 | 7.0 | 133.0 | 84.0 | 25.0 | 144.0 | 40.2 | 0.696 | 37.0 | 0.0 |
42 | 7.0 | 106.0 | 92.0 | 18.0 | 83.0 | 22.7 | 0.235 | 48.0 | 0.0 |
43 | 9.0 | 171.0 | 110.0 | 24.0 | 240.0 | 45.4 | 0.721 | 54.0 | 1.0 |
44 | 7.0 | 159.0 | 64.0 | 34.0 | 220.0 | 27.4 | 0.294 | 40.0 | 0.0 |
45 | 0.0 | 180.0 | 66.0 | 39.0 | 366.0 | 42.0 | 1.893 | 25.0 | 1.0 |
46 | 1.0 | 146.0 | 56.0 | 31.0 | 129.0 | 29.7 | 0.564 | 29.0 | 0.0 |
47 | 2.0 | 71.0 | 70.0 | 27.0 | 45.0 | 28.0 | 0.586 | 22.0 | 0.0 |
48 | 7.0 | 103.0 | 66.0 | 32.0 | 98.0 | 39.1 | 0.344 | 31.0 | 1.0 |
49 | 7.0 | 105.0 | 69.0 | 26.0 | 192.0 | 33.4 | 0.305 | 24.0 | 0.0 |
50 | 1.0 | 103.0 | 80.0 | 11.0 | 82.0 | 19.4 | 0.491 | 22.0 | 0.0 |
51 | 1.0 | 101.0 | 50.0 | 15.0 | 36.0 | 24.2 | 0.526 | 26.0 | 0.0 |
52 | 5.0 | 88.0 | 66.0 | 21.0 | 23.0 | 24.4 | 0.342 | 30.0 | 0.0 |
53 | 8.0 | 176.0 | 90.0 | 34.0 | 300.0 | 33.7 | 0.467 | 58.0 | 1.0 |
54 | 7.0 | 150.0 | 66.0 | 42.0 | 342.0 | 34.7 | 0.718 | 42.0 | 0.0 |
55 | 1.0 | 73.0 | 50.0 | 10.0 | 42.0 | 23.0 | 0.248 | 21.0 | 0.0 |
56 | 7.0 | 187.0 | 68.0 | 39.0 | 304.0 | 37.7 | 0.254 | 41.0 | 1.0 |
57 | 0.0 | 100.0 | 88.0 | 60.0 | 110.0 | 46.8 | 0.962 | 31.0 | 0.0 |
58 | 0.0 | 146.0 | 82.0 | 35.0 | 291.0 | 40.5 | 1.781 | 44.0 | 0.0 |
59 | 0.0 | 105.0 | 64.0 | 41.0 | 142.0 | 41.5 | 0.173 | 22.0 | 0.0 |
60 | 2.0 | 84.0 | 63.0 | 22.0 | 50.0 | 30.1 | 0.304 | 21.0 | 0.0 |
61 | 8.0 | 133.0 | 72.0 | 39.0 | 117.0 | 32.9 | 0.270 | 39.0 | 1.0 |
62 | 5.0 | 44.0 | 62.0 | 28.0 | 45.0 | 25.0 | 0.587 | 36.0 | 0.0 |
63 | 2.0 | 141.0 | 58.0 | 34.0 | 128.0 | 25.4 | 0.699 | 24.0 | 0.0 |
64 | 7.0 | 114.0 | 66.0 | 36.0 | 111.0 | 32.8 | 0.258 | 42.0 | 1.0 |
65 | 5.0 | 99.0 | 74.0 | 27.0 | 102.0 | 29.0 | 0.203 | 32.0 | 0.0 |
66 | 0.0 | 109.0 | 88.0 | 30.0 | 167.0 | 32.5 | 0.855 | 38.0 | 1.0 |
67 | 2.0 | 109.0 | 92.0 | 31.0 | 138.0 | 42.7 | 0.845 | 54.0 | 0.0 |
68 | 1.0 | 95.0 | 66.0 | 13.0 | 38.0 | 19.6 | 0.334 | 25.0 | 0.0 |
69 | 4.0 | 146.0 | 85.0 | 27.0 | 100.0 | 28.9 | 0.189 | 27.0 | 0.0 |
70 | 2.0 | 100.0 | 66.0 | 20.0 | 90.0 | 32.9 | 0.867 | 28.0 | 1.0 |
71 | 5.0 | 139.0 | 64.0 | 35.0 | 140.0 | 28.6 | 0.411 | 26.0 | 0.0 |
72 | 13.0 | 126.0 | 90.0 | 35.0 | 210.0 | 43.4 | 0.583 | 42.0 | 1.0 |
73 | 4.0 | 129.0 | 86.0 | 20.0 | 270.0 | 35.1 | 0.231 | 23.0 | 0.0 |
74 | 1.0 | 79.0 | 75.0 | 30.0 | 61.0 | 32.0 | 0.396 | 22.0 | 0.0 |
75 | 1.0 | 86.0 | 48.0 | 20.0 | 178.0 | 24.7 | 0.140 | 22.0 | 0.0 |
76 | 7.0 | 62.0 | 78.0 | 29.0 | 52.0 | 32.6 | 0.391 | 41.0 | 0.0 |
77 | 5.0 | 95.0 | 72.0 | 33.0 | 105.0 | 37.7 | 0.370 | 27.0 | 0.0 |
78 | 0.0 | 131.0 | 65.0 | 41.0 | 209.0 | 43.2 | 0.270 | 26.0 | 1.0 |
79 | 2.0 | 112.0 | 66.0 | 22.0 | 100.0 | 25.0 | 0.307 | 24.0 | 0.0 |
80 | 3.0 | 113.0 | 44.0 | 13.0 | 85.0 | 22.4 | 0.140 | 22.0 | 0.0 |
81 | 2.0 | 74.0 | 61.0 | 28.0 | 35.0 | 28.8 | 0.102 | 22.0 | 0.0 |
82 | 7.0 | 83.0 | 78.0 | 26.0 | 71.0 | 29.3 | 0.767 | 36.0 | 0.0 |
83 | 0.0 | 101.0 | 65.0 | 28.0 | 74.0 | 24.6 | 0.237 | 22.0 | 0.0 |
84 | 5.0 | 137.0 | 108.0 | 33.0 | 118.0 | 48.8 | 0.227 | 37.0 | 1.0 |
85 | 2.0 | 110.0 | 74.0 | 29.0 | 125.0 | 32.4 | 0.698 | 27.0 | 0.0 |
86 | 13.0 | 106.0 | 72.0 | 54.0 | 120.0 | 36.6 | 0.178 | 45.0 | 0.0 |
87 | 2.0 | 100.0 | 68.0 | 25.0 | 71.0 | 38.5 | 0.324 | 26.0 | 0.0 |
88 | 15.0 | 136.0 | 70.0 | 32.0 | 110.0 | 37.1 | 0.153 | 43.0 | 1.0 |
89 | 1.0 | 107.0 | 68.0 | 19.0 | 65.0 | 26.5 | 0.165 | 24.0 | 0.0 |
90 | 1.0 | 80.0 | 55.0 | 14.0 | 88.0 | 19.1 | 0.258 | 21.0 | 0.0 |
91 | 4.0 | 123.0 | 80.0 | 15.0 | 176.0 | 32.0 | 0.443 | 34.0 | 0.0 |
92 | 7.0 | 81.0 | 78.0 | 40.0 | 48.0 | 46.7 | 0.261 | 42.0 | 0.0 |
93 | 4.0 | 134.0 | 72.0 | 27.0 | 343.0 | 23.8 | 0.277 | 60.0 | 1.0 |
94 | 2.0 | 142.0 | 82.0 | 18.0 | 64.0 | 24.7 | 0.761 | 21.0 | 0.0 |
95 | 6.0 | 144.0 | 72.0 | 27.0 | 228.0 | 33.9 | 0.255 | 40.0 | 0.0 |
96 | 2.0 | 92.0 | 62.0 | 28.0 | 74.0 | 31.6 | 0.130 | 24.0 | 0.0 |
97 | 1.0 | 71.0 | 48.0 | 18.0 | 76.0 | 20.4 | 0.323 | 22.0 | 0.0 |
98 | 6.0 | 93.0 | 50.0 | 30.0 | 64.0 | 28.7 | 0.356 | 23.0 | 0.0 |
99 | 1.0 | 122.0 | 90.0 | 51.0 | 220.0 | 49.7 | 0.325 | 31.0 | 1.0 |
100 | 1.0 | 163.0 | 72.0 | 27.0 | 226.0 | 39.0 | 1.222 | 33.0 | 1.0 |
101 | 1.0 | 151.0 | 60.0 | 23.0 | 266.0 | 26.1 | 0.179 | 22.0 | 0.0 |
102 | 0.0 | 125.0 | 96.0 | 25.0 | 135.0 | 22.5 | 0.262 | 21.0 | 0.0 |
103 | 1.0 | 81.0 | 72.0 | 18.0 | 40.0 | 26.6 | 0.283 | 24.0 | 0.0 |
104 | 2.0 | 85.0 | 65.0 | 34.0 | 82.0 | 39.6 | 0.930 | 27.0 | 0.0 |
105 | 1.0 | 126.0 | 56.0 | 29.0 | 152.0 | 28.7 | 0.801 | 21.0 | 0.0 |
106 | 1.0 | 96.0 | 122.0 | 34.0 | 85.0 | 22.4 | 0.207 | 27.0 | 0.0 |
107 | 4.0 | 144.0 | 58.0 | 28.0 | 140.0 | 29.5 | 0.287 | 37.0 | 0.0 |
108 | 3.0 | 83.0 | 58.0 | 31.0 | 18.0 | 34.3 | 0.336 | 25.0 | 0.0 |
109 | 0.0 | 95.0 | 85.0 | 25.0 | 36.0 | 37.4 | 0.247 | 24.0 | 1.0 |
110 | 3.0 | 171.0 | 72.0 | 33.0 | 135.0 | 33.3 | 0.199 | 24.0 | 1.0 |
111 | 8.0 | 155.0 | 62.0 | 26.0 | 495.0 | 34.0 | 0.543 | 46.0 | 1.0 |
112 | 1.0 | 89.0 | 76.0 | 34.0 | 37.0 | 31.2 | 0.192 | 23.0 | 0.0 |
113 | 4.0 | 76.0 | 62.0 | 30.0 | 40.0 | 34.0 | 0.391 | 25.0 | 0.0 |
114 | 7.0 | 160.0 | 54.0 | 32.0 | 175.0 | 30.5 | 0.588 | 39.0 | 1.0 |
115 | 4.0 | 146.0 | 92.0 | 26.0 | 199.0 | 31.2 | 0.539 | 61.0 | 1.0 |
116 | 5.0 | 124.0 | 74.0 | 32.0 | 105.0 | 34.0 | 0.220 | 38.0 | 1.0 |
117 | 5.0 | 78.0 | 48.0 | 25.0 | 82.0 | 33.7 | 0.654 | 25.0 | 0.0 |
118 | 4.0 | 97.0 | 60.0 | 23.0 | 79.0 | 28.2 | 0.443 | 22.0 | 0.0 |
119 | 4.0 | 99.0 | 76.0 | 15.0 | 51.0 | 23.2 | 0.223 | 21.0 | 0.0 |
120 | 0.0 | 162.0 | 76.0 | 56.0 | 100.0 | 53.2 | 0.759 | 25.0 | 1.0 |
121 | 6.0 | 111.0 | 64.0 | 39.0 | 145.0 | 34.2 | 0.260 | 24.0 | 0.0 |
122 | 2.0 | 107.0 | 74.0 | 30.0 | 100.0 | 33.6 | 0.404 | 23.0 | 0.0 |
123 | 5.0 | 132.0 | 80.0 | 24.0 | 174.0 | 26.8 | 0.186 | 69.0 | 0.0 |
124 | 0.0 | 113.0 | 76.0 | 35.0 | 134.0 | 33.3 | 0.278 | 23.0 | 1.0 |
125 | 1.0 | 88.0 | 30.0 | 42.0 | 99.0 | 55.0 | 0.496 | 26.0 | 1.0 |
126 | 3.0 | 120.0 | 70.0 | 30.0 | 135.0 | 42.9 | 0.452 | 30.0 | 0.0 |
127 | 1.0 | 118.0 | 58.0 | 36.0 | 94.0 | 33.3 | 0.261 | 23.0 | 0.0 |
128 | 1.0 | 117.0 | 88.0 | 24.0 | 145.0 | 34.5 | 0.403 | 40.0 | 1.0 |
129 | 0.0 | 105.0 | 84.0 | 33.0 | 181.0 | 27.9 | 0.741 | 62.0 | 1.0 |
130 | 4.0 | 173.0 | 70.0 | 14.0 | 168.0 | 29.7 | 0.361 | 33.0 | 1.0 |
131 | 9.0 | 122.0 | 56.0 | 24.0 | 166.0 | 33.3 | 1.114 | 33.0 | 1.0 |
132 | 3.0 | 170.0 | 64.0 | 37.0 | 225.0 | 34.5 | 0.356 | 30.0 | 1.0 |
133 | 8.0 | 84.0 | 74.0 | 31.0 | 58.0 | 38.3 | 0.457 | 39.0 | 0.0 |
134 | 2.0 | 96.0 | 68.0 | 13.0 | 49.0 | 21.1 | 0.647 | 26.0 | 0.0 |
135 | 2.0 | 125.0 | 60.0 | 20.0 | 140.0 | 33.8 | 0.088 | 31.0 | 0.0 |
136 | 0.0 | 100.0 | 70.0 | 26.0 | 50.0 | 30.8 | 0.597 | 21.0 | 0.0 |
137 | 0.0 | 93.0 | 60.0 | 25.0 | 92.0 | 28.7 | 0.532 | 22.0 | 0.0 |
138 | 0.0 | 129.0 | 80.0 | 29.0 | 183.0 | 31.2 | 0.703 | 29.0 | 0.0 |
139 | 5.0 | 105.0 | 72.0 | 29.0 | 325.0 | 36.9 | 0.159 | 28.0 | 0.0 |
140 | 3.0 | 128.0 | 78.0 | 20.0 | 172.0 | 21.1 | 0.268 | 55.0 | 0.0 |
141 | 5.0 | 106.0 | 82.0 | 30.0 | 166.0 | 39.5 | 0.286 | 38.0 | 0.0 |
142 | 2.0 | 108.0 | 52.0 | 26.0 | 63.0 | 32.5 | 0.318 | 22.0 | 0.0 |
143 | 10.0 | 108.0 | 66.0 | 29.0 | 162.0 | 32.4 | 0.272 | 42.0 | 1.0 |
144 | 4.0 | 154.0 | 62.0 | 31.0 | 284.0 | 32.8 | 0.237 | 23.0 | 0.0 |
145 | 0.0 | 102.0 | 75.0 | 23.0 | 30.0 | 25.0 | 0.572 | 21.0 | 0.0 |
146 | 9.0 | 57.0 | 80.0 | 37.0 | 52.0 | 32.8 | 0.096 | 41.0 | 0.0 |
147 | 2.0 | 106.0 | 64.0 | 35.0 | 119.0 | 30.5 | 1.400 | 34.0 | 0.0 |
148 | 5.0 | 147.0 | 78.0 | 20.0 | 145.0 | 33.7 | 0.218 | 65.0 | 0.0 |
149 | 2.0 | 90.0 | 70.0 | 17.0 | 51.0 | 27.3 | 0.085 | 22.0 | 0.0 |
150 | 1.0 | 136.0 | 74.0 | 50.0 | 204.0 | 37.4 | 0.399 | 24.0 | 0.0 |
151 | 4.0 | 114.0 | 65.0 | 22.0 | 140.0 | 21.9 | 0.432 | 37.0 | 0.0 |
152 | 9.0 | 156.0 | 86.0 | 28.0 | 155.0 | 34.3 | 1.189 | 42.0 | 1.0 |
153 | 1.0 | 153.0 | 82.0 | 42.0 | 485.0 | 40.6 | 0.687 | 23.0 | 0.0 |
154 | 8.0 | 188.0 | 78.0 | 33.0 | 237.0 | 47.9 | 0.137 | 43.0 | 1.0 |
155 | 7.0 | 152.0 | 88.0 | 44.0 | 173.0 | 50.0 | 0.337 | 36.0 | 1.0 |
156 | 2.0 | 99.0 | 52.0 | 15.0 | 94.0 | 24.6 | 0.637 | 21.0 | 0.0 |
157 | 1.0 | 109.0 | 56.0 | 21.0 | 135.0 | 25.2 | 0.833 | 23.0 | 0.0 |
158 | 2.0 | 88.0 | 74.0 | 19.0 | 53.0 | 29.0 | 0.229 | 22.0 | 0.0 |
159 | 17.0 | 163.0 | 72.0 | 41.0 | 114.0 | 40.9 | 0.817 | 47.0 | 1.0 |
160 | 4.0 | 151.0 | 90.0 | 38.0 | 262.0 | 29.7 | 0.294 | 36.0 | 0.0 |
161 | 7.0 | 102.0 | 74.0 | 40.0 | 105.0 | 37.2 | 0.204 | 45.0 | 0.0 |
162 | 0.0 | 114.0 | 80.0 | 34.0 | 285.0 | 44.2 | 0.167 | 27.0 | 0.0 |
163 | 2.0 | 100.0 | 64.0 | 23.0 | 84.0 | 29.7 | 0.368 | 21.0 | 0.0 |
164 | 0.0 | 131.0 | 88.0 | 43.0 | 118.0 | 31.6 | 0.743 | 32.0 | 1.0 |
165 | 6.0 | 104.0 | 74.0 | 18.0 | 156.0 | 29.9 | 0.722 | 41.0 | 1.0 |
166 | 3.0 | 148.0 | 66.0 | 25.0 | 142.0 | 32.5 | 0.256 | 22.0 | 0.0 |
167 | 4.0 | 120.0 | 68.0 | 33.0 | 154.0 | 29.6 | 0.709 | 34.0 | 0.0 |
168 | 4.0 | 110.0 | 66.0 | 28.0 | 118.0 | 31.9 | 0.471 | 29.0 | 0.0 |
169 | 3.0 | 111.0 | 90.0 | 12.0 | 78.0 | 28.4 | 0.495 | 29.0 | 0.0 |
170 | 6.0 | 102.0 | 82.0 | 32.0 | 174.0 | 30.8 | 0.180 | 36.0 | 1.0 |
171 | 6.0 | 134.0 | 70.0 | 23.0 | 130.0 | 35.4 | 0.542 | 29.0 | 1.0 |
172 | 2.0 | 87.0 | 59.0 | 23.0 | 36.0 | 28.9 | 0.773 | 25.0 | 0.0 |
173 | 1.0 | 79.0 | 60.0 | 42.0 | 48.0 | 43.5 | 0.678 | 23.0 | 0.0 |
174 | 2.0 | 75.0 | 64.0 | 24.0 | 55.0 | 29.7 | 0.370 | 33.0 | 0.0 |
175 | 8.0 | 179.0 | 72.0 | 42.0 | 130.0 | 32.7 | 0.719 | 36.0 | 1.0 |
176 | 6.0 | 85.0 | 78.0 | 24.0 | 70.0 | 31.2 | 0.382 | 42.0 | 0.0 |
177 | 0.0 | 129.0 | 110.0 | 46.0 | 130.0 | 67.1 | 0.319 | 26.0 | 1.0 |
178 | 5.0 | 143.0 | 78.0 | 34.0 | 223.0 | 45.0 | 0.190 | 47.0 | 0.0 |
179 | 5.0 | 130.0 | 82.0 | 37.0 | 127.0 | 39.1 | 0.956 | 37.0 | 1.0 |
180 | 6.0 | 87.0 | 80.0 | 25.0 | 67.0 | 23.2 | 0.084 | 32.0 | 0.0 |
181 | 0.0 | 119.0 | 64.0 | 18.0 | 92.0 | 34.9 | 0.725 | 23.0 | 0.0 |
182 | 1.0 | 79.0 | 74.0 | 20.0 | 23.0 | 27.7 | 0.299 | 21.0 | 0.0 |
183 | 5.0 | 73.0 | 60.0 | 25.0 | 38.0 | 26.8 | 0.268 | 27.0 | 0.0 |
184 | 4.0 | 141.0 | 74.0 | 28.0 | 185.0 | 27.6 | 0.244 | 40.0 | 0.0 |
185 | 7.0 | 194.0 | 68.0 | 28.0 | 276.0 | 35.9 | 0.745 | 41.0 | 1.0 |
186 | 8.0 | 181.0 | 68.0 | 36.0 | 495.0 | 30.1 | 0.615 | 60.0 | 1.0 |
187 | 1.0 | 128.0 | 98.0 | 41.0 | 58.0 | 32.0 | 1.321 | 33.0 | 1.0 |
188 | 8.0 | 109.0 | 76.0 | 39.0 | 114.0 | 27.9 | 0.640 | 31.0 | 1.0 |
189 | 5.0 | 139.0 | 80.0 | 35.0 | 160.0 | 31.6 | 0.361 | 25.0 | 1.0 |
190 | 3.0 | 111.0 | 62.0 | 18.0 | 172.0 | 22.6 | 0.142 | 21.0 | 0.0 |
191 | 9.0 | 123.0 | 70.0 | 44.0 | 94.0 | 33.1 | 0.374 | 40.0 | 0.0 |
192 | 7.0 | 159.0 | 66.0 | 30.0 | 309.0 | 30.4 | 0.383 | 36.0 | 1.0 |
193 | 11.0 | 135.0 | 83.0 | 35.0 | 225.0 | 52.3 | 0.578 | 40.0 | 1.0 |
194 | 8.0 | 85.0 | 55.0 | 20.0 | 50.0 | 24.4 | 0.136 | 42.0 | 0.0 |
195 | 5.0 | 158.0 | 84.0 | 41.0 | 210.0 | 39.4 | 0.395 | 29.0 | 1.0 |
196 | 1.0 | 105.0 | 58.0 | 17.0 | 179.0 | 24.3 | 0.187 | 21.0 | 0.0 |
197 | 3.0 | 107.0 | 62.0 | 13.0 | 48.0 | 22.9 | 0.678 | 23.0 | 1.0 |
198 | 4.0 | 109.0 | 64.0 | 44.0 | 99.0 | 34.8 | 0.905 | 26.0 | 1.0 |
199 | 4.0 | 148.0 | 60.0 | 27.0 | 318.0 | 30.9 | 0.150 | 29.0 | 1.0 |
200 | 0.0 | 113.0 | 80.0 | 16.0 | 76.0 | 31.0 | 0.874 | 21.0 | 0.0 |
201 | 1.0 | 138.0 | 82.0 | 38.0 | 236.0 | 40.1 | 0.236 | 28.0 | 0.0 |
202 | 0.0 | 108.0 | 68.0 | 20.0 | 92.0 | 27.3 | 0.787 | 32.0 | 0.0 |
203 | 2.0 | 99.0 | 70.0 | 16.0 | 44.0 | 20.4 | 0.235 | 27.0 | 0.0 |
204 | 6.0 | 103.0 | 72.0 | 32.0 | 190.0 | 37.7 | 0.324 | 55.0 | 0.0 |
205 | 5.0 | 111.0 | 72.0 | 28.0 | 120.0 | 23.9 | 0.407 | 27.0 | 0.0 |
206 | 8.0 | 196.0 | 76.0 | 29.0 | 280.0 | 37.5 | 0.605 | 57.0 | 1.0 |
207 | 5.0 | 162.0 | 104.0 | 33.0 | 264.0 | 37.7 | 0.151 | 52.0 | 1.0 |
208 | 1.0 | 96.0 | 64.0 | 27.0 | 87.0 | 33.2 | 0.289 | 21.0 | 0.0 |
209 | 7.0 | 184.0 | 84.0 | 33.0 | 197.0 | 35.5 | 0.355 | 41.0 | 1.0 |
210 | 2.0 | 81.0 | 60.0 | 22.0 | 69.0 | 27.7 | 0.290 | 25.0 | 0.0 |
211 | 0.0 | 147.0 | 85.0 | 54.0 | 320.0 | 42.8 | 0.375 | 24.0 | 0.0 |
212 | 7.0 | 179.0 | 95.0 | 31.0 | 167.0 | 34.2 | 0.164 | 60.0 | 0.0 |
213 | 0.0 | 140.0 | 65.0 | 26.0 | 130.0 | 42.6 | 0.431 | 24.0 | 1.0 |
214 | 9.0 | 112.0 | 82.0 | 32.0 | 175.0 | 34.2 | 0.260 | 36.0 | 1.0 |
215 | 12.0 | 151.0 | 70.0 | 40.0 | 271.0 | 41.8 | 0.742 | 38.0 | 1.0 |
216 | 5.0 | 109.0 | 62.0 | 41.0 | 129.0 | 35.8 | 0.514 | 25.0 | 1.0 |
217 | 6.0 | 125.0 | 68.0 | 30.0 | 120.0 | 30.0 | 0.464 | 32.0 | 0.0 |
218 | 5.0 | 85.0 | 74.0 | 22.0 | 113.0 | 29.0 | 1.224 | 32.0 | 1.0 |
219 | 5.0 | 112.0 | 66.0 | 37.0 | 139.0 | 37.8 | 0.261 | 41.0 | 1.0 |
220 | 0.0 | 177.0 | 60.0 | 29.0 | 478.0 | 34.6 | 1.072 | 21.0 | 1.0 |
221 | 2.0 | 158.0 | 90.0 | 24.0 | 192.0 | 31.6 | 0.805 | 66.0 | 1.0 |
222 | 7.0 | 119.0 | 80.0 | 28.0 | 126.0 | 25.2 | 0.209 | 37.0 | 0.0 |
223 | 7.0 | 142.0 | 60.0 | 33.0 | 190.0 | 28.8 | 0.687 | 61.0 | 0.0 |
224 | 1.0 | 100.0 | 66.0 | 15.0 | 56.0 | 23.6 | 0.666 | 26.0 | 0.0 |
225 | 1.0 | 87.0 | 78.0 | 27.0 | 32.0 | 34.6 | 0.101 | 22.0 | 0.0 |
226 | 0.0 | 101.0 | 76.0 | 36.0 | 67.0 | 35.7 | 0.198 | 26.0 | 0.0 |
227 | 3.0 | 162.0 | 52.0 | 38.0 | 308.0 | 37.2 | 0.652 | 24.0 | 1.0 |
228 | 4.0 | 197.0 | 70.0 | 39.0 | 744.0 | 36.7 | 2.329 | 31.0 | 0.0 |
229 | 0.0 | 117.0 | 80.0 | 31.0 | 53.0 | 45.2 | 0.089 | 24.0 | 0.0 |
230 | 4.0 | 142.0 | 86.0 | 41.0 | 153.0 | 44.0 | 0.645 | 22.0 | 1.0 |
231 | 6.0 | 134.0 | 80.0 | 37.0 | 370.0 | 46.2 | 0.238 | 46.0 | 1.0 |
232 | 1.0 | 79.0 | 80.0 | 25.0 | 37.0 | 25.4 | 0.583 | 22.0 | 0.0 |
233 | 4.0 | 122.0 | 68.0 | 32.0 | 142.0 | 35.0 | 0.394 | 29.0 | 0.0 |
234 | 3.0 | 74.0 | 68.0 | 28.0 | 45.0 | 29.7 | 0.293 | 23.0 | 0.0 |
235 | 4.0 | 171.0 | 72.0 | 43.0 | 393.0 | 43.6 | 0.479 | 26.0 | 1.0 |
236 | 7.0 | 181.0 | 84.0 | 21.0 | 192.0 | 35.9 | 0.586 | 51.0 | 1.0 |
237 | 0.0 | 179.0 | 90.0 | 27.0 | 228.0 | 44.1 | 0.686 | 23.0 | 1.0 |
238 | 9.0 | 164.0 | 84.0 | 21.0 | 141.0 | 30.8 | 0.831 | 32.0 | 1.0 |
239 | 0.0 | 104.0 | 76.0 | 15.0 | 64.0 | 18.4 | 0.582 | 27.0 | 0.0 |
240 | 1.0 | 91.0 | 64.0 | 24.0 | 77.0 | 29.2 | 0.192 | 21.0 | 0.0 |
241 | 4.0 | 91.0 | 70.0 | 32.0 | 88.0 | 33.1 | 0.446 | 22.0 | 0.0 |
242 | 3.0 | 139.0 | 54.0 | 32.0 | 207.0 | 25.6 | 0.402 | 22.0 | 1.0 |
243 | 6.0 | 119.0 | 50.0 | 22.0 | 176.0 | 27.1 | 1.318 | 33.0 | 1.0 |
244 | 2.0 | 146.0 | 76.0 | 35.0 | 194.0 | 38.2 | 0.329 | 29.0 | 0.0 |
245 | 9.0 | 184.0 | 85.0 | 15.0 | 185.0 | 30.0 | 1.213 | 49.0 | 1.0 |
246 | 10.0 | 122.0 | 68.0 | 37.0 | 237.0 | 31.2 | 0.258 | 41.0 | 0.0 |
247 | 0.0 | 165.0 | 90.0 | 33.0 | 680.0 | 52.3 | 0.427 | 23.0 | 0.0 |
248 | 9.0 | 124.0 | 70.0 | 33.0 | 402.0 | 35.4 | 0.282 | 34.0 | 0.0 |
249 | 1.0 | 111.0 | 86.0 | 19.0 | 85.0 | 30.1 | 0.143 | 23.0 | 0.0 |
250 | 9.0 | 106.0 | 52.0 | 29.0 | 145.0 | 31.2 | 0.380 | 42.0 | 0.0 |
251 | 2.0 | 129.0 | 84.0 | 24.0 | 173.0 | 28.0 | 0.284 | 27.0 | 0.0 |
252 | 2.0 | 90.0 | 80.0 | 14.0 | 55.0 | 24.4 | 0.249 | 24.0 | 0.0 |
253 | 0.0 | 86.0 | 68.0 | 32.0 | 83.0 | 35.8 | 0.238 | 25.0 | 0.0 |
254 | 12.0 | 92.0 | 62.0 | 7.0 | 258.0 | 27.6 | 0.926 | 44.0 | 1.0 |
255 | 1.0 | 113.0 | 64.0 | 35.0 | 89.0 | 33.6 | 0.543 | 21.0 | 1.0 |
256 | 3.0 | 111.0 | 56.0 | 39.0 | 102.0 | 30.1 | 0.557 | 30.0 | 0.0 |
257 | 2.0 | 114.0 | 68.0 | 22.0 | 69.0 | 28.7 | 0.092 | 25.0 | 0.0 |
258 | 1.0 | 193.0 | 50.0 | 16.0 | 375.0 | 25.9 | 0.655 | 24.0 | 0.0 |
259 | 11.0 | 155.0 | 76.0 | 28.0 | 150.0 | 33.3 | 1.353 | 51.0 | 1.0 |
260 | 3.0 | 191.0 | 68.0 | 15.0 | 130.0 | 30.9 | 0.299 | 34.0 | 0.0 |
261 | 3.0 | 141.0 | 69.0 | 35.0 | 218.0 | 30.0 | 0.761 | 27.0 | 1.0 |
262 | 4.0 | 95.0 | 70.0 | 32.0 | 64.0 | 32.1 | 0.612 | 24.0 | 0.0 |
263 | 3.0 | 142.0 | 80.0 | 15.0 | 190.0 | 32.4 | 0.200 | 63.0 | 0.0 |
264 | 4.0 | 123.0 | 62.0 | 25.0 | 175.0 | 32.0 | 0.226 | 35.0 | 1.0 |
265 | 5.0 | 96.0 | 74.0 | 18.0 | 67.0 | 33.6 | 0.997 | 43.0 | 0.0 |
266 | 0.0 | 138.0 | 64.0 | 34.0 | 182.0 | 36.3 | 0.933 | 25.0 | 1.0 |
267 | 2.0 | 128.0 | 64.0 | 42.0 | 217.0 | 40.0 | 1.101 | 24.0 | 0.0 |
268 | 0.0 | 102.0 | 52.0 | 18.0 | 72.0 | 25.1 | 0.078 | 21.0 | 0.0 |
269 | 2.0 | 146.0 | 63.0 | 33.0 | 271.0 | 27.5 | 0.240 | 28.0 | 1.0 |
270 | 10.0 | 101.0 | 86.0 | 37.0 | 142.0 | 45.6 | 1.136 | 38.0 | 1.0 |
271 | 2.0 | 108.0 | 62.0 | 32.0 | 56.0 | 25.2 | 0.128 | 21.0 | 0.0 |
272 | 3.0 | 122.0 | 78.0 | 26.0 | 61.0 | 23.0 | 0.254 | 40.0 | 0.0 |
273 | 1.0 | 71.0 | 78.0 | 50.0 | 45.0 | 33.2 | 0.422 | 21.0 | 0.0 |
274 | 13.0 | 106.0 | 70.0 | 42.0 | 158.0 | 34.2 | 0.251 | 52.0 | 0.0 |
275 | 2.0 | 100.0 | 70.0 | 52.0 | 57.0 | 40.5 | 0.677 | 25.0 | 0.0 |
276 | 7.0 | 106.0 | 60.0 | 24.0 | 71.0 | 26.5 | 0.296 | 29.0 | 1.0 |
277 | 0.0 | 104.0 | 64.0 | 23.0 | 116.0 | 27.8 | 0.454 | 23.0 | 0.0 |
278 | 5.0 | 114.0 | 74.0 | 19.0 | 142.0 | 24.9 | 0.744 | 57.0 | 0.0 |
279 | 2.0 | 108.0 | 62.0 | 10.0 | 278.0 | 25.3 | 0.881 | 22.0 | 0.0 |
280 | 0.0 | 146.0 | 70.0 | 35.0 | 247.0 | 37.9 | 0.334 | 28.0 | 1.0 |
281 | 10.0 | 129.0 | 76.0 | 28.0 | 122.0 | 35.9 | 0.280 | 39.0 | 0.0 |
282 | 7.0 | 133.0 | 88.0 | 15.0 | 155.0 | 32.4 | 0.262 | 37.0 | 0.0 |
283 | 7.0 | 161.0 | 86.0 | 30.0 | 168.0 | 30.4 | 0.165 | 47.0 | 1.0 |
284 | 2.0 | 108.0 | 80.0 | 31.0 | 321.0 | 27.0 | 0.259 | 52.0 | 1.0 |
285 | 7.0 | 136.0 | 74.0 | 26.0 | 135.0 | 26.0 | 0.647 | 51.0 | 0.0 |
286 | 5.0 | 155.0 | 84.0 | 44.0 | 545.0 | 38.7 | 0.619 | 34.0 | 0.0 |
287 | 1.0 | 119.0 | 86.0 | 39.0 | 220.0 | 45.6 | 0.808 | 29.0 | 1.0 |
288 | 4.0 | 96.0 | 56.0 | 17.0 | 49.0 | 20.8 | 0.340 | 26.0 | 0.0 |
289 | 5.0 | 108.0 | 72.0 | 43.0 | 75.0 | 36.1 | 0.263 | 33.0 | 0.0 |
290 | 0.0 | 78.0 | 88.0 | 29.0 | 40.0 | 36.9 | 0.434 | 21.0 | 0.0 |
291 | 0.0 | 107.0 | 62.0 | 30.0 | 74.0 | 36.6 | 0.757 | 25.0 | 1.0 |
292 | 2.0 | 128.0 | 78.0 | 37.0 | 182.0 | 43.3 | 1.224 | 31.0 | 1.0 |
293 | 1.0 | 128.0 | 48.0 | 45.0 | 194.0 | 40.5 | 0.613 | 24.0 | 1.0 |
294 | 0.0 | 161.0 | 50.0 | 34.0 | 233.0 | 21.9 | 0.254 | 65.0 | 0.0 |
295 | 6.0 | 151.0 | 62.0 | 31.0 | 120.0 | 35.5 | 0.692 | 28.0 | 0.0 |
296 | 2.0 | 146.0 | 70.0 | 38.0 | 360.0 | 28.0 | 0.337 | 29.0 | 1.0 |
297 | 0.0 | 126.0 | 84.0 | 29.0 | 215.0 | 30.7 | 0.520 | 24.0 | 0.0 |
298 | 14.0 | 100.0 | 78.0 | 25.0 | 184.0 | 36.6 | 0.412 | 46.0 | 1.0 |
299 | 8.0 | 112.0 | 72.0 | 19.0 | 142.0 | 23.6 | 0.840 | 58.0 | 0.0 |
300 | 0.0 | 167.0 | 69.0 | 25.0 | 152.0 | 32.3 | 0.839 | 30.0 | 1.0 |
301 | 2.0 | 144.0 | 58.0 | 33.0 | 135.0 | 31.6 | 0.422 | 25.0 | 1.0 |
302 | 5.0 | 77.0 | 82.0 | 41.0 | 42.0 | 35.8 | 0.156 | 35.0 | 0.0 |
303 | 5.0 | 115.0 | 98.0 | 46.0 | 223.0 | 52.9 | 0.209 | 28.0 | 1.0 |
304 | 3.0 | 150.0 | 76.0 | 30.0 | 140.0 | 21.0 | 0.207 | 37.0 | 0.0 |
305 | 2.0 | 120.0 | 76.0 | 37.0 | 105.0 | 39.7 | 0.215 | 29.0 | 0.0 |
306 | 10.0 | 161.0 | 68.0 | 23.0 | 132.0 | 25.5 | 0.326 | 47.0 | 1.0 |
307 | 0.0 | 137.0 | 68.0 | 14.0 | 148.0 | 24.8 | 0.143 | 21.0 | 0.0 |
308 | 0.0 | 128.0 | 68.0 | 19.0 | 180.0 | 30.5 | 1.391 | 25.0 | 1.0 |
309 | 2.0 | 124.0 | 68.0 | 28.0 | 205.0 | 32.9 | 0.875 | 30.0 | 1.0 |
310 | 6.0 | 80.0 | 66.0 | 30.0 | 84.0 | 26.2 | 0.313 | 41.0 | 0.0 |
311 | 0.0 | 106.0 | 70.0 | 37.0 | 148.0 | 39.4 | 0.605 | 22.0 | 0.0 |
312 | 2.0 | 155.0 | 74.0 | 17.0 | 96.0 | 26.6 | 0.433 | 27.0 | 1.0 |
313 | 3.0 | 113.0 | 50.0 | 10.0 | 85.0 | 29.5 | 0.626 | 25.0 | 0.0 |
314 | 7.0 | 109.0 | 80.0 | 31.0 | 191.0 | 35.9 | 1.127 | 43.0 | 1.0 |
315 | 2.0 | 112.0 | 68.0 | 22.0 | 94.0 | 34.1 | 0.315 | 26.0 | 0.0 |
316 | 3.0 | 99.0 | 80.0 | 11.0 | 64.0 | 19.3 | 0.284 | 30.0 | 0.0 |
317 | 3.0 | 182.0 | 74.0 | 40.0 | 197.0 | 30.5 | 0.345 | 29.0 | 1.0 |
318 | 3.0 | 115.0 | 66.0 | 39.0 | 140.0 | 38.1 | 0.150 | 28.0 | 0.0 |
319 | 6.0 | 194.0 | 78.0 | 36.0 | 323.0 | 23.5 | 0.129 | 59.0 | 1.0 |
320 | 4.0 | 129.0 | 60.0 | 12.0 | 231.0 | 27.5 | 0.527 | 31.0 | 0.0 |
321 | 3.0 | 112.0 | 74.0 | 30.0 | 95.0 | 31.6 | 0.197 | 25.0 | 1.0 |
322 | 0.0 | 124.0 | 70.0 | 20.0 | 135.0 | 27.4 | 0.254 | 36.0 | 1.0 |
323 | 13.0 | 152.0 | 90.0 | 33.0 | 29.0 | 26.8 | 0.731 | 43.0 | 1.0 |
324 | 2.0 | 112.0 | 75.0 | 32.0 | 152.0 | 35.7 | 0.148 | 21.0 | 0.0 |
325 | 1.0 | 157.0 | 72.0 | 21.0 | 168.0 | 25.6 | 0.123 | 24.0 | 0.0 |
326 | 1.0 | 122.0 | 64.0 | 32.0 | 156.0 | 35.1 | 0.692 | 30.0 | 1.0 |
327 | 10.0 | 179.0 | 70.0 | 22.0 | 123.0 | 35.1 | 0.200 | 37.0 | 0.0 |
328 | 2.0 | 102.0 | 86.0 | 36.0 | 120.0 | 45.5 | 0.127 | 23.0 | 1.0 |
329 | 6.0 | 105.0 | 70.0 | 32.0 | 68.0 | 30.8 | 0.122 | 37.0 | 0.0 |
330 | 8.0 | 118.0 | 72.0 | 19.0 | 55.0 | 23.1 | 1.476 | 46.0 | 0.0 |
331 | 2.0 | 87.0 | 58.0 | 16.0 | 52.0 | 32.7 | 0.166 | 25.0 | 0.0 |
332 | 1.0 | 180.0 | 77.0 | 33.0 | 191.0 | 43.3 | 0.282 | 41.0 | 1.0 |
333 | 12.0 | 106.0 | 80.0 | 23.0 | 93.0 | 23.6 | 0.137 | 44.0 | 0.0 |
334 | 1.0 | 95.0 | 60.0 | 18.0 | 58.0 | 23.9 | 0.260 | 22.0 | 0.0 |
335 | 0.0 | 165.0 | 76.0 | 43.0 | 255.0 | 47.9 | 0.259 | 26.0 | 0.0 |
336 | 0.0 | 117.0 | 87.0 | 39.0 | 133.0 | 33.8 | 0.932 | 44.0 | 0.0 |
337 | 5.0 | 115.0 | 76.0 | 29.0 | 151.0 | 31.2 | 0.343 | 44.0 | 1.0 |
338 | 9.0 | 152.0 | 78.0 | 34.0 | 171.0 | 34.2 | 0.893 | 33.0 | 1.0 |
339 | 7.0 | 178.0 | 84.0 | 36.0 | 201.0 | 39.9 | 0.331 | 41.0 | 1.0 |
340 | 1.0 | 130.0 | 70.0 | 13.0 | 105.0 | 25.9 | 0.472 | 22.0 | 0.0 |
341 | 1.0 | 95.0 | 74.0 | 21.0 | 73.0 | 25.9 | 0.673 | 36.0 | 0.0 |
342 | 1.0 | 79.0 | 68.0 | 35.0 | 118.0 | 32.0 | 0.389 | 22.0 | 0.0 |
343 | 5.0 | 122.0 | 86.0 | 21.0 | 189.0 | 34.7 | 0.290 | 33.0 | 0.0 |
344 | 8.0 | 95.0 | 72.0 | 40.0 | 141.0 | 36.8 | 0.485 | 57.0 | 0.0 |
345 | 8.0 | 126.0 | 88.0 | 36.0 | 108.0 | 38.5 | 0.349 | 49.0 | 0.0 |
346 | 1.0 | 139.0 | 46.0 | 19.0 | 83.0 | 28.7 | 0.654 | 22.0 | 0.0 |
347 | 3.0 | 116.0 | 71.0 | 18.0 | 75.0 | 23.5 | 0.187 | 23.0 | 0.0 |
348 | 3.0 | 99.0 | 62.0 | 19.0 | 74.0 | 21.8 | 0.279 | 26.0 | 0.0 |
349 | 5.0 | 142.0 | 80.0 | 32.0 | 159.0 | 41.0 | 0.346 | 37.0 | 1.0 |
350 | 4.0 | 92.0 | 80.0 | 42.0 | 74.0 | 42.2 | 0.237 | 29.0 | 0.0 |
351 | 4.0 | 137.0 | 84.0 | 23.0 | 195.0 | 31.2 | 0.252 | 30.0 | 0.0 |
352 | 3.0 | 61.0 | 82.0 | 28.0 | 54.0 | 34.4 | 0.243 | 46.0 | 0.0 |
353 | 1.0 | 90.0 | 62.0 | 12.0 | 43.0 | 27.2 | 0.580 | 24.0 | 0.0 |
354 | 3.0 | 90.0 | 78.0 | 28.0 | 91.0 | 42.7 | 0.559 | 21.0 | 0.0 |
355 | 9.0 | 165.0 | 88.0 | 33.0 | 211.0 | 30.4 | 0.302 | 49.0 | 1.0 |
356 | 1.0 | 125.0 | 50.0 | 40.0 | 167.0 | 33.3 | 0.962 | 28.0 | 1.0 |
357 | 13.0 | 129.0 | 77.0 | 30.0 | 123.0 | 39.9 | 0.569 | 44.0 | 1.0 |
358 | 12.0 | 88.0 | 74.0 | 40.0 | 54.0 | 35.3 | 0.378 | 48.0 | 0.0 |
359 | 1.0 | 196.0 | 76.0 | 36.0 | 249.0 | 36.5 | 0.875 | 29.0 | 1.0 |
360 | 5.0 | 189.0 | 64.0 | 33.0 | 325.0 | 31.2 | 0.583 | 29.0 | 1.0 |
361 | 5.0 | 158.0 | 70.0 | 25.0 | 142.0 | 29.8 | 0.207 | 63.0 | 0.0 |
362 | 5.0 | 103.0 | 108.0 | 37.0 | 95.0 | 39.2 | 0.305 | 65.0 | 0.0 |
363 | 4.0 | 146.0 | 78.0 | 29.0 | 245.0 | 38.5 | 0.520 | 67.0 | 1.0 |
364 | 4.0 | 147.0 | 74.0 | 25.0 | 293.0 | 34.9 | 0.385 | 30.0 | 0.0 |
365 | 5.0 | 99.0 | 54.0 | 28.0 | 83.0 | 34.0 | 0.499 | 30.0 | 0.0 |
366 | 6.0 | 124.0 | 72.0 | 24.0 | 192.0 | 27.6 | 0.368 | 29.0 | 1.0 |
367 | 0.0 | 101.0 | 64.0 | 17.0 | 93.0 | 21.0 | 0.252 | 21.0 | 0.0 |
368 | 3.0 | 81.0 | 86.0 | 16.0 | 66.0 | 27.5 | 0.306 | 22.0 | 0.0 |
369 | 1.0 | 133.0 | 102.0 | 28.0 | 140.0 | 32.8 | 0.234 | 45.0 | 1.0 |
370 | 3.0 | 173.0 | 82.0 | 48.0 | 465.0 | 38.4 | 2.137 | 25.0 | 1.0 |
371 | 0.0 | 118.0 | 64.0 | 23.0 | 89.0 | 31.0 | 1.731 | 21.0 | 0.0 |
372 | 0.0 | 84.0 | 64.0 | 22.0 | 66.0 | 35.8 | 0.545 | 21.0 | 0.0 |
373 | 2.0 | 105.0 | 58.0 | 40.0 | 94.0 | 34.9 | 0.225 | 25.0 | 0.0 |
374 | 2.0 | 122.0 | 52.0 | 43.0 | 158.0 | 36.2 | 0.816 | 28.0 | 0.0 |
375 | 12.0 | 140.0 | 82.0 | 43.0 | 325.0 | 39.2 | 0.528 | 58.0 | 1.0 |
376 | 0.0 | 98.0 | 82.0 | 15.0 | 84.0 | 25.2 | 0.299 | 22.0 | 0.0 |
377 | 1.0 | 87.0 | 60.0 | 37.0 | 75.0 | 37.2 | 0.509 | 22.0 | 0.0 |
378 | 4.0 | 156.0 | 75.0 | 33.0 | 173.0 | 48.3 | 0.238 | 32.0 | 1.0 |
379 | 0.0 | 93.0 | 100.0 | 39.0 | 72.0 | 43.4 | 1.021 | 35.0 | 0.0 |
380 | 1.0 | 107.0 | 72.0 | 30.0 | 82.0 | 30.8 | 0.821 | 24.0 | 0.0 |
381 | 0.0 | 105.0 | 68.0 | 22.0 | 81.0 | 20.0 | 0.236 | 22.0 | 0.0 |
382 | 1.0 | 109.0 | 60.0 | 8.0 | 182.0 | 25.4 | 0.947 | 21.0 | 0.0 |
383 | 1.0 | 90.0 | 62.0 | 18.0 | 59.0 | 25.1 | 1.268 | 25.0 | 0.0 |
384 | 1.0 | 125.0 | 70.0 | 24.0 | 110.0 | 24.3 | 0.221 | 25.0 | 0.0 |
385 | 1.0 | 119.0 | 54.0 | 13.0 | 50.0 | 22.3 | 0.205 | 24.0 | 0.0 |
386 | 5.0 | 116.0 | 74.0 | 29.0 | 145.0 | 32.3 | 0.660 | 35.0 | 1.0 |
387 | 8.0 | 105.0 | 100.0 | 36.0 | 157.0 | 43.3 | 0.239 | 45.0 | 1.0 |
388 | 5.0 | 144.0 | 82.0 | 26.0 | 285.0 | 32.0 | 0.452 | 58.0 | 1.0 |
389 | 3.0 | 100.0 | 68.0 | 23.0 | 81.0 | 31.6 | 0.949 | 28.0 | 0.0 |
390 | 1.0 | 100.0 | 66.0 | 29.0 | 196.0 | 32.0 | 0.444 | 42.0 | 0.0 |
391 | 5.0 | 166.0 | 76.0 | 47.0 | 381.0 | 45.7 | 0.340 | 27.0 | 1.0 |
392 | 1.0 | 131.0 | 64.0 | 14.0 | 415.0 | 23.7 | 0.389 | 21.0 | 0.0 |
393 | 4.0 | 116.0 | 72.0 | 12.0 | 87.0 | 22.1 | 0.463 | 37.0 | 0.0 |
394 | 4.0 | 158.0 | 78.0 | 37.0 | 209.0 | 32.9 | 0.803 | 31.0 | 1.0 |
395 | 2.0 | 127.0 | 58.0 | 24.0 | 275.0 | 27.7 | 1.600 | 25.0 | 0.0 |
396 | 3.0 | 96.0 | 56.0 | 34.0 | 115.0 | 24.7 | 0.944 | 39.0 | 0.0 |
397 | 0.0 | 131.0 | 66.0 | 40.0 | 191.0 | 34.3 | 0.196 | 22.0 | 1.0 |
398 | 3.0 | 82.0 | 70.0 | 15.0 | 70.0 | 21.1 | 0.389 | 25.0 | 0.0 |
399 | 3.0 | 193.0 | 70.0 | 31.0 | 283.0 | 34.9 | 0.241 | 25.0 | 1.0 |
400 | 4.0 | 95.0 | 64.0 | 31.0 | 105.0 | 32.0 | 0.161 | 31.0 | 1.0 |
401 | 6.0 | 137.0 | 61.0 | 29.0 | 155.0 | 24.2 | 0.151 | 55.0 | 0.0 |
402 | 5.0 | 136.0 | 84.0 | 41.0 | 88.0 | 35.0 | 0.286 | 35.0 | 1.0 |
403 | 9.0 | 72.0 | 78.0 | 25.0 | 58.0 | 31.6 | 0.280 | 38.0 | 0.0 |
404 | 5.0 | 168.0 | 64.0 | 33.0 | 196.0 | 32.9 | 0.135 | 41.0 | 1.0 |
405 | 2.0 | 123.0 | 48.0 | 32.0 | 165.0 | 42.1 | 0.520 | 26.0 | 0.0 |
406 | 4.0 | 115.0 | 72.0 | 25.0 | 293.0 | 28.9 | 0.376 | 46.0 | 1.0 |
407 | 0.0 | 101.0 | 62.0 | 17.0 | 73.0 | 21.9 | 0.336 | 25.0 | 0.0 |
408 | 8.0 | 197.0 | 74.0 | 30.0 | 331.0 | 25.9 | 1.191 | 39.0 | 1.0 |
409 | 1.0 | 172.0 | 68.0 | 49.0 | 579.0 | 42.4 | 0.702 | 28.0 | 1.0 |
410 | 6.0 | 102.0 | 90.0 | 39.0 | 101.0 | 35.7 | 0.674 | 28.0 | 0.0 |
411 | 1.0 | 112.0 | 72.0 | 30.0 | 176.0 | 34.4 | 0.528 | 25.0 | 0.0 |
412 | 1.0 | 143.0 | 84.0 | 23.0 | 310.0 | 42.4 | 1.076 | 22.0 | 0.0 |
413 | 1.0 | 143.0 | 74.0 | 22.0 | 61.0 | 26.2 | 0.256 | 21.0 | 0.0 |
414 | 0.0 | 138.0 | 60.0 | 35.0 | 167.0 | 34.6 | 0.534 | 21.0 | 1.0 |
415 | 3.0 | 173.0 | 84.0 | 33.0 | 474.0 | 35.7 | 0.258 | 22.0 | 1.0 |
416 | 1.0 | 97.0 | 68.0 | 21.0 | 50.0 | 27.2 | 1.095 | 22.0 | 0.0 |
417 | 4.0 | 144.0 | 82.0 | 32.0 | 156.0 | 38.5 | 0.554 | 37.0 | 1.0 |
418 | 1.0 | 83.0 | 68.0 | 21.0 | 53.0 | 18.2 | 0.624 | 27.0 | 0.0 |
419 | 3.0 | 129.0 | 64.0 | 29.0 | 115.0 | 26.4 | 0.219 | 28.0 | 1.0 |
420 | 1.0 | 119.0 | 88.0 | 41.0 | 170.0 | 45.3 | 0.507 | 26.0 | 0.0 |
421 | 2.0 | 94.0 | 68.0 | 18.0 | 76.0 | 26.0 | 0.561 | 21.0 | 0.0 |
422 | 0.0 | 102.0 | 64.0 | 46.0 | 78.0 | 40.6 | 0.496 | 21.0 | 0.0 |
423 | 2.0 | 115.0 | 64.0 | 22.0 | 100.0 | 30.8 | 0.421 | 21.0 | 0.0 |
424 | 8.0 | 151.0 | 78.0 | 32.0 | 210.0 | 42.9 | 0.516 | 36.0 | 1.0 |
425 | 4.0 | 184.0 | 78.0 | 39.0 | 277.0 | 37.0 | 0.264 | 31.0 | 1.0 |
426 | 0.0 | 94.0 | 69.0 | 28.0 | 57.0 | 30.5 | 0.256 | 25.0 | 0.0 |
427 | 1.0 | 181.0 | 64.0 | 30.0 | 180.0 | 34.1 | 0.328 | 38.0 | 1.0 |
428 | 0.0 | 135.0 | 94.0 | 46.0 | 145.0 | 40.6 | 0.284 | 26.0 | 0.0 |
429 | 1.0 | 95.0 | 82.0 | 25.0 | 180.0 | 35.0 | 0.233 | 43.0 | 1.0 |
430 | 2.0 | 99.0 | 61.0 | 22.0 | 165.0 | 22.2 | 0.108 | 23.0 | 0.0 |
431 | 3.0 | 89.0 | 74.0 | 16.0 | 85.0 | 30.4 | 0.551 | 38.0 | 0.0 |
432 | 1.0 | 80.0 | 74.0 | 11.0 | 60.0 | 30.0 | 0.527 | 22.0 | 0.0 |
433 | 2.0 | 139.0 | 75.0 | 23.0 | 88.0 | 25.6 | 0.167 | 29.0 | 0.0 |
434 | 1.0 | 90.0 | 68.0 | 8.0 | 68.0 | 24.5 | 1.138 | 36.0 | 0.0 |
435 | 0.0 | 141.0 | 58.0 | 34.0 | 183.0 | 42.4 | 0.205 | 29.0 | 1.0 |
436 | 12.0 | 140.0 | 85.0 | 33.0 | 163.0 | 37.4 | 0.244 | 41.0 | 0.0 |
437 | 5.0 | 147.0 | 75.0 | 27.0 | 170.0 | 29.9 | 0.434 | 28.0 | 0.0 |
438 | 1.0 | 97.0 | 70.0 | 15.0 | 76.0 | 18.2 | 0.147 | 21.0 | 0.0 |
439 | 6.0 | 107.0 | 88.0 | 37.0 | 114.0 | 36.8 | 0.727 | 31.0 | 0.0 |
440 | 0.0 | 189.0 | 104.0 | 25.0 | 157.0 | 34.3 | 0.435 | 41.0 | 1.0 |
441 | 2.0 | 83.0 | 66.0 | 23.0 | 50.0 | 32.2 | 0.497 | 22.0 | 0.0 |
442 | 4.0 | 117.0 | 64.0 | 27.0 | 120.0 | 33.2 | 0.230 | 24.0 | 0.0 |
443 | 8.0 | 108.0 | 70.0 | 32.0 | 122.0 | 30.5 | 0.955 | 33.0 | 1.0 |
444 | 4.0 | 117.0 | 62.0 | 12.0 | 135.0 | 29.7 | 0.380 | 30.0 | 1.0 |
445 | 0.0 | 180.0 | 78.0 | 63.0 | 14.0 | 59.4 | 2.420 | 25.0 | 1.0 |
446 | 1.0 | 100.0 | 72.0 | 12.0 | 70.0 | 25.3 | 0.658 | 28.0 | 0.0 |
447 | 0.0 | 95.0 | 80.0 | 45.0 | 92.0 | 36.5 | 0.330 | 26.0 | 0.0 |
448 | 0.0 | 104.0 | 64.0 | 37.0 | 64.0 | 33.6 | 0.510 | 22.0 | 1.0 |
449 | 0.0 | 120.0 | 74.0 | 18.0 | 63.0 | 30.5 | 0.285 | 26.0 | 0.0 |
450 | 1.0 | 82.0 | 64.0 | 13.0 | 95.0 | 21.2 | 0.415 | 23.0 | 0.0 |
451 | 2.0 | 134.0 | 70.0 | 28.0 | 162.0 | 28.9 | 0.542 | 23.0 | 1.0 |
452 | 0.0 | 91.0 | 68.0 | 32.0 | 210.0 | 39.9 | 0.381 | 25.0 | 0.0 |
453 | 2.0 | 119.0 | 82.0 | 18.0 | 147.0 | 19.6 | 0.832 | 72.0 | 0.0 |
454 | 2.0 | 100.0 | 54.0 | 28.0 | 105.0 | 37.8 | 0.498 | 24.0 | 0.0 |
455 | 14.0 | 175.0 | 62.0 | 30.0 | 200.0 | 33.6 | 0.212 | 38.0 | 1.0 |
456 | 1.0 | 135.0 | 54.0 | 29.0 | 155.0 | 26.7 | 0.687 | 62.0 | 0.0 |
457 | 5.0 | 86.0 | 68.0 | 28.0 | 71.0 | 30.2 | 0.364 | 24.0 | 0.0 |
458 | 10.0 | 148.0 | 84.0 | 48.0 | 237.0 | 37.6 | 1.001 | 51.0 | 1.0 |
459 | 9.0 | 134.0 | 74.0 | 33.0 | 60.0 | 25.9 | 0.460 | 81.0 | 0.0 |
460 | 9.0 | 120.0 | 72.0 | 22.0 | 56.0 | 20.8 | 0.733 | 48.0 | 0.0 |
461 | 1.0 | 71.0 | 62.0 | 24.0 | 42.0 | 21.8 | 0.416 | 26.0 | 0.0 |
462 | 8.0 | 74.0 | 70.0 | 40.0 | 49.0 | 35.3 | 0.705 | 39.0 | 0.0 |
463 | 5.0 | 88.0 | 78.0 | 30.0 | 73.0 | 27.6 | 0.258 | 37.0 | 0.0 |
464 | 10.0 | 115.0 | 98.0 | 16.0 | 101.0 | 24.0 | 1.022 | 34.0 | 0.0 |
465 | 0.0 | 124.0 | 56.0 | 13.0 | 105.0 | 21.8 | 0.452 | 21.0 | 0.0 |
466 | 0.0 | 74.0 | 52.0 | 10.0 | 36.0 | 27.8 | 0.269 | 22.0 | 0.0 |
467 | 0.0 | 97.0 | 64.0 | 36.0 | 100.0 | 36.8 | 0.600 | 25.0 | 0.0 |
468 | 8.0 | 120.0 | 69.0 | 26.0 | 145.0 | 30.0 | 0.183 | 38.0 | 1.0 |
469 | 6.0 | 154.0 | 78.0 | 41.0 | 140.0 | 46.1 | 0.571 | 27.0 | 0.0 |
470 | 1.0 | 144.0 | 82.0 | 40.0 | 317.0 | 41.3 | 0.607 | 28.0 | 0.0 |
471 | 0.0 | 137.0 | 70.0 | 38.0 | 275.0 | 33.2 | 0.170 | 22.0 | 0.0 |
472 | 0.0 | 119.0 | 66.0 | 27.0 | 101.0 | 38.8 | 0.259 | 22.0 | 0.0 |
473 | 7.0 | 136.0 | 90.0 | 35.0 | 196.0 | 29.9 | 0.210 | 50.0 | 0.0 |
474 | 4.0 | 114.0 | 64.0 | 22.0 | 163.0 | 28.9 | 0.126 | 24.0 | 0.0 |
475 | 0.0 | 137.0 | 84.0 | 27.0 | 118.0 | 27.3 | 0.231 | 59.0 | 0.0 |
476 | 2.0 | 105.0 | 80.0 | 45.0 | 191.0 | 33.7 | 0.711 | 29.0 | 1.0 |
477 | 7.0 | 114.0 | 76.0 | 17.0 | 110.0 | 23.8 | 0.466 | 31.0 | 0.0 |
478 | 8.0 | 126.0 | 74.0 | 38.0 | 75.0 | 25.9 | 0.162 | 39.0 | 0.0 |
479 | 4.0 | 132.0 | 86.0 | 31.0 | 118.0 | 28.0 | 0.419 | 63.0 | 0.0 |
480 | 3.0 | 158.0 | 70.0 | 30.0 | 328.0 | 35.5 | 0.344 | 35.0 | 1.0 |
481 | 0.0 | 123.0 | 88.0 | 37.0 | 192.0 | 35.2 | 0.197 | 29.0 | 0.0 |
482 | 4.0 | 85.0 | 58.0 | 22.0 | 49.0 | 27.8 | 0.306 | 28.0 | 0.0 |
483 | 0.0 | 84.0 | 82.0 | 31.0 | 125.0 | 38.2 | 0.233 | 23.0 | 0.0 |
484 | 0.0 | 145.0 | 68.0 | 34.0 | 169.0 | 44.2 | 0.630 | 31.0 | 1.0 |
485 | 0.0 | 135.0 | 68.0 | 42.0 | 250.0 | 42.3 | 0.365 | 24.0 | 1.0 |
486 | 1.0 | 139.0 | 62.0 | 41.0 | 480.0 | 40.7 | 0.536 | 21.0 | 0.0 |
487 | 0.0 | 173.0 | 78.0 | 32.0 | 265.0 | 46.5 | 1.159 | 58.0 | 0.0 |
488 | 4.0 | 99.0 | 72.0 | 17.0 | 57.0 | 25.6 | 0.294 | 28.0 | 0.0 |
489 | 8.0 | 194.0 | 80.0 | 35.0 | 154.0 | 26.1 | 0.551 | 67.0 | 0.0 |
490 | 2.0 | 83.0 | 65.0 | 28.0 | 66.0 | 36.8 | 0.629 | 24.0 | 0.0 |
491 | 2.0 | 89.0 | 90.0 | 30.0 | 84.0 | 33.5 | 0.292 | 42.0 | 0.0 |
492 | 4.0 | 99.0 | 68.0 | 38.0 | 96.0 | 32.8 | 0.145 | 33.0 | 0.0 |
493 | 4.0 | 125.0 | 70.0 | 18.0 | 122.0 | 28.9 | 1.144 | 45.0 | 1.0 |
494 | 3.0 | 80.0 | 69.0 | 16.0 | 54.0 | 27.4 | 0.174 | 22.0 | 0.0 |
495 | 6.0 | 166.0 | 74.0 | 34.0 | 167.0 | 26.6 | 0.304 | 66.0 | 0.0 |
496 | 5.0 | 110.0 | 68.0 | 23.0 | 124.0 | 26.0 | 0.292 | 30.0 | 0.0 |
497 | 2.0 | 81.0 | 72.0 | 15.0 | 76.0 | 30.1 | 0.547 | 25.0 | 0.0 |
498 | 7.0 | 195.0 | 70.0 | 33.0 | 145.0 | 25.1 | 0.163 | 55.0 | 1.0 |
499 | 6.0 | 154.0 | 74.0 | 32.0 | 193.0 | 29.3 | 0.839 | 39.0 | 0.0 |
500 | 2.0 | 117.0 | 90.0 | 19.0 | 71.0 | 25.2 | 0.313 | 21.0 | 0.0 |
501 | 3.0 | 84.0 | 72.0 | 32.0 | 83.0 | 37.2 | 0.267 | 28.0 | 0.0 |
502 | 6.0 | 147.0 | 68.0 | 41.0 | 263.0 | 39.0 | 0.727 | 41.0 | 1.0 |
503 | 7.0 | 94.0 | 64.0 | 25.0 | 79.0 | 33.3 | 0.738 | 41.0 | 0.0 |
504 | 3.0 | 96.0 | 78.0 | 39.0 | 84.0 | 37.3 | 0.238 | 40.0 | 0.0 |
505 | 10.0 | 75.0 | 82.0 | 31.0 | 54.0 | 33.3 | 0.263 | 38.0 | 0.0 |
506 | 0.0 | 180.0 | 90.0 | 26.0 | 90.0 | 36.5 | 0.314 | 35.0 | 1.0 |
507 | 1.0 | 130.0 | 60.0 | 23.0 | 170.0 | 28.6 | 0.692 | 21.0 | 0.0 |
508 | 2.0 | 84.0 | 50.0 | 23.0 | 76.0 | 30.4 | 0.968 | 21.0 | 0.0 |
509 | 8.0 | 120.0 | 78.0 | 18.0 | 187.0 | 25.0 | 0.409 | 64.0 | 0.0 |
510 | 12.0 | 84.0 | 72.0 | 31.0 | 193.0 | 29.7 | 0.297 | 46.0 | 1.0 |
511 | 0.0 | 139.0 | 62.0 | 17.0 | 210.0 | 22.1 | 0.207 | 21.0 | 0.0 |
512 | 9.0 | 91.0 | 68.0 | 33.0 | 120.0 | 24.2 | 0.200 | 58.0 | 0.0 |
513 | 2.0 | 91.0 | 62.0 | 20.0 | 65.0 | 27.3 | 0.525 | 22.0 | 0.0 |
514 | 3.0 | 99.0 | 54.0 | 19.0 | 86.0 | 25.6 | 0.154 | 24.0 | 0.0 |
515 | 3.0 | 163.0 | 70.0 | 18.0 | 105.0 | 31.6 | 0.268 | 28.0 | 1.0 |
516 | 9.0 | 145.0 | 88.0 | 34.0 | 165.0 | 30.3 | 0.771 | 53.0 | 1.0 |
517 | 7.0 | 125.0 | 86.0 | 38.0 | 138.0 | 37.6 | 0.304 | 51.0 | 0.0 |
518 | 13.0 | 76.0 | 60.0 | 30.0 | 77.0 | 32.8 | 0.180 | 41.0 | 0.0 |
519 | 6.0 | 129.0 | 90.0 | 7.0 | 326.0 | 19.6 | 0.582 | 60.0 | 0.0 |
520 | 2.0 | 68.0 | 70.0 | 32.0 | 66.0 | 25.0 | 0.187 | 25.0 | 0.0 |
521 | 3.0 | 124.0 | 80.0 | 33.0 | 130.0 | 33.2 | 0.305 | 26.0 | 0.0 |
522 | 6.0 | 114.0 | 62.0 | 26.0 | 115.0 | 27.4 | 0.189 | 26.0 | 0.0 |
523 | 9.0 | 130.0 | 70.0 | 35.0 | 121.0 | 34.2 | 0.652 | 45.0 | 1.0 |
524 | 3.0 | 125.0 | 58.0 | 24.0 | 178.0 | 31.6 | 0.151 | 24.0 | 0.0 |
525 | 3.0 | 87.0 | 60.0 | 18.0 | 57.0 | 21.8 | 0.444 | 21.0 | 0.0 |
526 | 1.0 | 97.0 | 64.0 | 19.0 | 82.0 | 18.2 | 0.299 | 21.0 | 0.0 |
527 | 3.0 | 116.0 | 74.0 | 15.0 | 105.0 | 26.3 | 0.107 | 24.0 | 0.0 |
528 | 0.0 | 117.0 | 66.0 | 31.0 | 188.0 | 30.8 | 0.493 | 22.0 | 0.0 |
529 | 0.0 | 111.0 | 65.0 | 22.0 | 193.0 | 24.6 | 0.660 | 31.0 | 0.0 |
530 | 2.0 | 122.0 | 60.0 | 18.0 | 106.0 | 29.8 | 0.717 | 22.0 | 0.0 |
531 | 0.0 | 107.0 | 76.0 | 36.0 | 191.0 | 45.3 | 0.686 | 24.0 | 0.0 |
532 | 1.0 | 86.0 | 66.0 | 52.0 | 65.0 | 41.3 | 0.917 | 29.0 | 0.0 |
533 | 6.0 | 91.0 | 69.0 | 27.0 | 49.0 | 29.8 | 0.501 | 31.0 | 0.0 |
534 | 1.0 | 77.0 | 56.0 | 30.0 | 56.0 | 33.3 | 1.251 | 24.0 | 0.0 |
535 | 4.0 | 132.0 | 69.0 | 28.0 | 167.0 | 32.9 | 0.302 | 23.0 | 1.0 |
536 | 0.0 | 105.0 | 90.0 | 27.0 | 96.0 | 29.6 | 0.197 | 46.0 | 0.0 |
537 | 0.0 | 57.0 | 60.0 | 29.0 | 77.0 | 21.7 | 0.735 | 67.0 | 0.0 |
538 | 0.0 | 127.0 | 80.0 | 37.0 | 210.0 | 36.3 | 0.804 | 23.0 | 0.0 |
539 | 3.0 | 129.0 | 92.0 | 49.0 | 155.0 | 36.4 | 0.968 | 32.0 | 1.0 |
540 | 8.0 | 100.0 | 74.0 | 40.0 | 215.0 | 39.4 | 0.661 | 43.0 | 1.0 |
541 | 3.0 | 128.0 | 72.0 | 25.0 | 190.0 | 32.4 | 0.549 | 27.0 | 1.0 |
542 | 10.0 | 90.0 | 85.0 | 32.0 | 193.0 | 34.9 | 0.825 | 56.0 | 1.0 |
543 | 4.0 | 84.0 | 90.0 | 23.0 | 56.0 | 39.5 | 0.159 | 25.0 | 0.0 |
544 | 1.0 | 88.0 | 78.0 | 29.0 | 76.0 | 32.0 | 0.365 | 29.0 | 0.0 |
545 | 8.0 | 186.0 | 90.0 | 35.0 | 225.0 | 34.5 | 0.423 | 37.0 | 1.0 |
546 | 5.0 | 187.0 | 76.0 | 27.0 | 207.0 | 43.6 | 1.034 | 53.0 | 1.0 |
547 | 4.0 | 131.0 | 68.0 | 21.0 | 166.0 | 33.1 | 0.160 | 28.0 | 0.0 |
548 | 1.0 | 164.0 | 82.0 | 43.0 | 67.0 | 32.8 | 0.341 | 50.0 | 0.0 |
549 | 4.0 | 189.0 | 110.0 | 31.0 | 497.0 | 28.5 | 0.680 | 37.0 | 0.0 |
550 | 1.0 | 116.0 | 70.0 | 28.0 | 151.0 | 27.4 | 0.204 | 21.0 | 0.0 |
551 | 3.0 | 84.0 | 68.0 | 30.0 | 106.0 | 31.9 | 0.591 | 25.0 | 0.0 |
552 | 6.0 | 114.0 | 88.0 | 17.0 | 205.0 | 27.8 | 0.247 | 66.0 | 0.0 |
553 | 1.0 | 88.0 | 62.0 | 24.0 | 44.0 | 29.9 | 0.422 | 23.0 | 0.0 |
554 | 1.0 | 84.0 | 64.0 | 23.0 | 115.0 | 36.9 | 0.471 | 28.0 | 0.0 |
555 | 7.0 | 124.0 | 70.0 | 33.0 | 215.0 | 25.5 | 0.161 | 37.0 | 0.0 |
556 | 1.0 | 97.0 | 70.0 | 40.0 | 133.0 | 38.1 | 0.218 | 30.0 | 0.0 |
557 | 8.0 | 110.0 | 76.0 | 30.0 | 142.0 | 27.8 | 0.237 | 58.0 | 0.0 |
558 | 11.0 | 103.0 | 68.0 | 40.0 | 123.0 | 46.2 | 0.126 | 42.0 | 0.0 |
559 | 11.0 | 85.0 | 74.0 | 24.0 | 77.0 | 30.1 | 0.300 | 35.0 | 0.0 |
560 | 6.0 | 125.0 | 76.0 | 24.0 | 291.0 | 33.8 | 0.121 | 54.0 | 1.0 |
561 | 0.0 | 198.0 | 66.0 | 32.0 | 274.0 | 41.3 | 0.502 | 28.0 | 1.0 |
562 | 1.0 | 87.0 | 68.0 | 34.0 | 77.0 | 37.6 | 0.401 | 24.0 | 0.0 |
563 | 6.0 | 99.0 | 60.0 | 19.0 | 54.0 | 26.9 | 0.497 | 32.0 | 0.0 |
564 | 0.0 | 91.0 | 80.0 | 36.0 | 68.0 | 32.4 | 0.601 | 27.0 | 0.0 |
565 | 2.0 | 95.0 | 54.0 | 14.0 | 88.0 | 26.1 | 0.748 | 22.0 | 0.0 |
566 | 1.0 | 99.0 | 72.0 | 30.0 | 18.0 | 38.6 | 0.412 | 21.0 | 0.0 |
567 | 6.0 | 92.0 | 62.0 | 32.0 | 126.0 | 32.0 | 0.085 | 46.0 | 0.0 |
568 | 4.0 | 154.0 | 72.0 | 29.0 | 126.0 | 31.3 | 0.338 | 37.0 | 0.0 |
569 | 0.0 | 121.0 | 66.0 | 30.0 | 165.0 | 34.3 | 0.203 | 33.0 | 1.0 |
570 | 3.0 | 78.0 | 70.0 | 31.0 | 58.0 | 32.5 | 0.270 | 39.0 | 0.0 |
571 | 2.0 | 130.0 | 96.0 | 21.0 | 125.0 | 22.6 | 0.268 | 21.0 | 0.0 |
572 | 3.0 | 111.0 | 58.0 | 31.0 | 44.0 | 29.5 | 0.430 | 22.0 | 0.0 |
573 | 2.0 | 98.0 | 60.0 | 17.0 | 120.0 | 34.7 | 0.198 | 22.0 | 0.0 |
574 | 1.0 | 143.0 | 86.0 | 30.0 | 330.0 | 30.1 | 0.892 | 23.0 | 0.0 |
575 | 1.0 | 119.0 | 44.0 | 47.0 | 63.0 | 35.5 | 0.280 | 25.0 | 0.0 |
576 | 6.0 | 108.0 | 44.0 | 20.0 | 130.0 | 24.0 | 0.813 | 35.0 | 0.0 |
577 | 2.0 | 118.0 | 80.0 | 41.0 | 211.0 | 42.9 | 0.693 | 21.0 | 1.0 |
578 | 10.0 | 133.0 | 68.0 | 34.0 | 137.0 | 27.0 | 0.245 | 36.0 | 0.0 |
579 | 2.0 | 197.0 | 70.0 | 99.0 | 287.0 | 34.7 | 0.575 | 62.0 | 1.0 |
580 | 0.0 | 151.0 | 90.0 | 46.0 | 205.0 | 42.1 | 0.371 | 21.0 | 1.0 |
581 | 6.0 | 109.0 | 60.0 | 27.0 | 88.0 | 25.0 | 0.206 | 27.0 | 0.0 |
582 | 12.0 | 121.0 | 78.0 | 17.0 | 172.0 | 26.5 | 0.259 | 62.0 | 0.0 |
583 | 8.0 | 100.0 | 76.0 | 32.0 | 80.0 | 38.7 | 0.190 | 42.0 | 0.0 |
584 | 8.0 | 124.0 | 76.0 | 24.0 | 600.0 | 28.7 | 0.687 | 52.0 | 1.0 |
585 | 1.0 | 93.0 | 56.0 | 11.0 | 67.0 | 22.5 | 0.417 | 22.0 | 0.0 |
586 | 8.0 | 143.0 | 66.0 | 34.0 | 169.0 | 34.9 | 0.129 | 41.0 | 1.0 |
587 | 6.0 | 103.0 | 66.0 | 22.0 | 107.0 | 24.3 | 0.249 | 29.0 | 0.0 |
588 | 3.0 | 176.0 | 86.0 | 27.0 | 156.0 | 33.3 | 1.154 | 52.0 | 1.0 |
589 | 0.0 | 73.0 | 53.0 | 16.0 | 52.0 | 21.1 | 0.342 | 25.0 | 0.0 |
590 | 11.0 | 111.0 | 84.0 | 40.0 | 180.0 | 46.8 | 0.925 | 45.0 | 1.0 |
591 | 2.0 | 112.0 | 78.0 | 50.0 | 140.0 | 39.4 | 0.175 | 24.0 | 0.0 |
592 | 3.0 | 132.0 | 80.0 | 35.0 | 109.0 | 34.4 | 0.402 | 44.0 | 1.0 |
593 | 2.0 | 82.0 | 52.0 | 22.0 | 115.0 | 28.5 | 1.699 | 25.0 | 0.0 |
594 | 6.0 | 123.0 | 72.0 | 45.0 | 230.0 | 33.6 | 0.733 | 34.0 | 0.0 |
595 | 0.0 | 188.0 | 82.0 | 14.0 | 185.0 | 32.0 | 0.682 | 22.0 | 1.0 |
596 | 0.0 | 67.0 | 76.0 | 30.0 | 46.0 | 45.3 | 0.194 | 46.0 | 0.0 |
597 | 1.0 | 89.0 | 24.0 | 19.0 | 25.0 | 27.8 | 0.559 | 21.0 | 0.0 |
598 | 1.0 | 173.0 | 74.0 | 37.0 | 161.0 | 36.8 | 0.088 | 38.0 | 1.0 |
599 | 1.0 | 109.0 | 38.0 | 18.0 | 120.0 | 23.1 | 0.407 | 26.0 | 0.0 |
600 | 1.0 | 108.0 | 88.0 | 19.0 | 85.0 | 27.1 | 0.400 | 24.0 | 0.0 |
601 | 6.0 | 96.0 | 63.0 | 18.0 | 57.0 | 23.7 | 0.190 | 28.0 | 0.0 |
602 | 1.0 | 124.0 | 74.0 | 36.0 | 109.0 | 27.8 | 0.100 | 30.0 | 0.0 |
603 | 7.0 | 150.0 | 78.0 | 29.0 | 126.0 | 35.2 | 0.692 | 54.0 | 1.0 |
604 | 4.0 | 183.0 | 70.0 | 35.0 | 234.0 | 28.4 | 0.212 | 36.0 | 1.0 |
605 | 1.0 | 124.0 | 60.0 | 32.0 | 145.0 | 35.8 | 0.514 | 21.0 | 0.0 |
606 | 1.0 | 181.0 | 78.0 | 42.0 | 293.0 | 40.0 | 1.258 | 22.0 | 1.0 |
607 | 1.0 | 92.0 | 62.0 | 25.0 | 41.0 | 19.5 | 0.482 | 25.0 | 0.0 |
608 | 0.0 | 152.0 | 82.0 | 39.0 | 272.0 | 41.5 | 0.270 | 27.0 | 0.0 |
609 | 1.0 | 111.0 | 62.0 | 13.0 | 182.0 | 24.0 | 0.138 | 23.0 | 0.0 |
610 | 3.0 | 106.0 | 54.0 | 21.0 | 158.0 | 30.9 | 0.292 | 24.0 | 0.0 |
611 | 3.0 | 174.0 | 58.0 | 22.0 | 194.0 | 32.9 | 0.593 | 36.0 | 1.0 |
612 | 7.0 | 168.0 | 88.0 | 42.0 | 321.0 | 38.2 | 0.787 | 40.0 | 1.0 |
613 | 6.0 | 105.0 | 80.0 | 28.0 | 183.0 | 32.5 | 0.878 | 26.0 | 0.0 |
614 | 11.0 | 138.0 | 74.0 | 26.0 | 144.0 | 36.1 | 0.557 | 50.0 | 1.0 |
615 | 3.0 | 106.0 | 72.0 | 26.0 | 111.0 | 25.8 | 0.207 | 27.0 | 0.0 |
616 | 6.0 | 117.0 | 96.0 | 23.0 | 101.0 | 28.7 | 0.157 | 30.0 | 0.0 |
617 | 2.0 | 68.0 | 62.0 | 13.0 | 15.0 | 20.1 | 0.257 | 23.0 | 0.0 |
618 | 9.0 | 112.0 | 82.0 | 24.0 | 300.0 | 28.2 | 1.282 | 50.0 | 1.0 |
619 | 0.0 | 119.0 | 64.0 | 36.0 | 176.0 | 32.4 | 0.141 | 24.0 | 1.0 |
620 | 2.0 | 112.0 | 86.0 | 42.0 | 160.0 | 38.4 | 0.246 | 28.0 | 0.0 |
621 | 2.0 | 92.0 | 76.0 | 20.0 | 44.0 | 24.2 | 1.698 | 28.0 | 0.0 |
622 | 6.0 | 183.0 | 94.0 | 31.0 | 154.0 | 40.8 | 1.461 | 45.0 | 0.0 |
623 | 0.0 | 94.0 | 70.0 | 27.0 | 115.0 | 43.5 | 0.347 | 21.0 | 0.0 |
624 | 2.0 | 108.0 | 64.0 | 30.0 | 150.0 | 30.8 | 0.158 | 21.0 | 0.0 |
625 | 4.0 | 90.0 | 88.0 | 47.0 | 54.0 | 37.7 | 0.362 | 29.0 | 0.0 |
626 | 0.0 | 125.0 | 68.0 | 19.0 | 79.0 | 24.7 | 0.206 | 21.0 | 0.0 |
627 | 0.0 | 132.0 | 78.0 | 28.0 | 146.0 | 32.4 | 0.393 | 21.0 | 0.0 |
628 | 5.0 | 128.0 | 80.0 | 30.0 | 84.0 | 34.6 | 0.144 | 45.0 | 0.0 |
629 | 4.0 | 94.0 | 65.0 | 22.0 | 76.0 | 24.7 | 0.148 | 21.0 | 0.0 |
630 | 7.0 | 114.0 | 64.0 | 26.0 | 120.0 | 27.4 | 0.732 | 34.0 | 1.0 |
631 | 0.0 | 102.0 | 78.0 | 40.0 | 90.0 | 34.5 | 0.238 | 24.0 | 0.0 |
632 | 2.0 | 111.0 | 60.0 | 10.0 | 214.0 | 26.2 | 0.343 | 23.0 | 0.0 |
633 | 1.0 | 128.0 | 82.0 | 17.0 | 183.0 | 27.5 | 0.115 | 22.0 | 0.0 |
634 | 10.0 | 92.0 | 62.0 | 21.0 | 42.0 | 25.9 | 0.167 | 31.0 | 0.0 |
635 | 13.0 | 104.0 | 72.0 | 27.0 | 162.0 | 31.2 | 0.465 | 38.0 | 1.0 |
636 | 5.0 | 104.0 | 74.0 | 30.0 | 121.0 | 28.8 | 0.153 | 48.0 | 0.0 |
637 | 2.0 | 94.0 | 76.0 | 18.0 | 66.0 | 31.6 | 0.649 | 23.0 | 0.0 |
638 | 7.0 | 97.0 | 76.0 | 32.0 | 91.0 | 40.9 | 0.871 | 32.0 | 1.0 |
639 | 1.0 | 100.0 | 74.0 | 12.0 | 46.0 | 19.5 | 0.149 | 28.0 | 0.0 |
640 | 0.0 | 102.0 | 86.0 | 17.0 | 105.0 | 29.3 | 0.695 | 27.0 | 0.0 |
641 | 4.0 | 128.0 | 70.0 | 27.0 | 149.0 | 34.3 | 0.303 | 24.0 | 0.0 |
642 | 6.0 | 147.0 | 80.0 | 30.0 | 192.0 | 29.5 | 0.178 | 50.0 | 1.0 |
643 | 4.0 | 90.0 | 73.0 | 23.0 | 49.0 | 28.0 | 0.610 | 31.0 | 0.0 |
644 | 3.0 | 103.0 | 72.0 | 30.0 | 152.0 | 27.6 | 0.730 | 27.0 | 0.0 |
645 | 2.0 | 157.0 | 74.0 | 35.0 | 440.0 | 39.4 | 0.134 | 30.0 | 0.0 |
646 | 1.0 | 167.0 | 74.0 | 17.0 | 144.0 | 23.4 | 0.447 | 33.0 | 1.0 |
647 | 0.0 | 179.0 | 50.0 | 36.0 | 159.0 | 37.8 | 0.455 | 22.0 | 1.0 |
648 | 11.0 | 136.0 | 84.0 | 35.0 | 130.0 | 28.3 | 0.260 | 42.0 | 1.0 |
649 | 0.0 | 107.0 | 60.0 | 25.0 | 139.0 | 26.4 | 0.133 | 23.0 | 0.0 |
650 | 1.0 | 91.0 | 54.0 | 25.0 | 100.0 | 25.2 | 0.234 | 23.0 | 0.0 |
651 | 1.0 | 117.0 | 60.0 | 23.0 | 106.0 | 33.8 | 0.466 | 27.0 | 0.0 |
652 | 5.0 | 123.0 | 74.0 | 40.0 | 77.0 | 34.1 | 0.269 | 28.0 | 0.0 |
653 | 2.0 | 120.0 | 54.0 | 18.0 | 144.0 | 26.8 | 0.455 | 27.0 | 0.0 |
654 | 1.0 | 106.0 | 70.0 | 28.0 | 135.0 | 34.2 | 0.142 | 22.0 | 0.0 |
655 | 2.0 | 155.0 | 52.0 | 27.0 | 540.0 | 38.7 | 0.240 | 25.0 | 1.0 |
656 | 2.0 | 101.0 | 58.0 | 35.0 | 90.0 | 21.8 | 0.155 | 22.0 | 0.0 |
657 | 1.0 | 120.0 | 80.0 | 48.0 | 200.0 | 38.9 | 1.162 | 41.0 | 0.0 |
658 | 11.0 | 127.0 | 106.0 | 31.0 | 123.0 | 39.0 | 0.190 | 51.0 | 0.0 |
659 | 3.0 | 80.0 | 82.0 | 31.0 | 70.0 | 34.2 | 1.292 | 27.0 | 1.0 |
660 | 10.0 | 162.0 | 84.0 | 33.0 | 112.0 | 27.7 | 0.182 | 54.0 | 0.0 |
661 | 1.0 | 199.0 | 76.0 | 43.0 | 272.0 | 42.9 | 1.394 | 22.0 | 1.0 |
662 | 8.0 | 167.0 | 106.0 | 46.0 | 231.0 | 37.6 | 0.165 | 43.0 | 1.0 |
663 | 9.0 | 145.0 | 80.0 | 46.0 | 130.0 | 37.9 | 0.637 | 40.0 | 1.0 |
664 | 6.0 | 115.0 | 60.0 | 39.0 | 139.0 | 33.7 | 0.245 | 40.0 | 1.0 |
665 | 1.0 | 112.0 | 80.0 | 45.0 | 132.0 | 34.8 | 0.217 | 24.0 | 0.0 |
666 | 4.0 | 145.0 | 82.0 | 18.0 | 192.0 | 32.5 | 0.235 | 70.0 | 1.0 |
667 | 10.0 | 111.0 | 70.0 | 27.0 | 122.0 | 27.5 | 0.141 | 40.0 | 1.0 |
668 | 6.0 | 98.0 | 58.0 | 33.0 | 190.0 | 34.0 | 0.430 | 43.0 | 0.0 |
669 | 9.0 | 154.0 | 78.0 | 30.0 | 100.0 | 30.9 | 0.164 | 45.0 | 0.0 |
670 | 6.0 | 165.0 | 68.0 | 26.0 | 168.0 | 33.6 | 0.631 | 49.0 | 0.0 |
671 | 1.0 | 99.0 | 58.0 | 10.0 | 149.0 | 25.4 | 0.551 | 21.0 | 0.0 |
672 | 10.0 | 68.0 | 106.0 | 23.0 | 49.0 | 35.5 | 0.285 | 47.0 | 0.0 |
673 | 3.0 | 123.0 | 100.0 | 35.0 | 240.0 | 57.3 | 0.880 | 22.0 | 0.0 |
674 | 8.0 | 91.0 | 82.0 | 33.0 | 141.0 | 35.6 | 0.587 | 68.0 | 0.0 |
675 | 6.0 | 195.0 | 70.0 | 33.0 | 258.0 | 30.9 | 0.328 | 31.0 | 1.0 |
676 | 9.0 | 156.0 | 86.0 | 32.0 | 115.0 | 24.8 | 0.230 | 53.0 | 1.0 |
677 | 0.0 | 93.0 | 60.0 | 32.0 | 102.0 | 35.3 | 0.263 | 25.0 | 0.0 |
678 | 3.0 | 121.0 | 52.0 | 36.0 | 179.0 | 36.0 | 0.127 | 25.0 | 1.0 |
679 | 2.0 | 101.0 | 58.0 | 17.0 | 265.0 | 24.2 | 0.614 | 23.0 | 0.0 |
680 | 2.0 | 56.0 | 56.0 | 28.0 | 45.0 | 24.2 | 0.332 | 22.0 | 0.0 |
681 | 0.0 | 162.0 | 76.0 | 36.0 | 180.0 | 49.6 | 0.364 | 26.0 | 1.0 |
682 | 0.0 | 95.0 | 64.0 | 39.0 | 105.0 | 44.6 | 0.366 | 22.0 | 0.0 |
683 | 4.0 | 125.0 | 80.0 | 28.0 | 150.0 | 32.3 | 0.536 | 27.0 | 1.0 |
684 | 5.0 | 136.0 | 82.0 | 24.0 | 165.0 | 29.1 | 0.640 | 69.0 | 0.0 |
685 | 2.0 | 129.0 | 74.0 | 26.0 | 205.0 | 33.2 | 0.591 | 25.0 | 0.0 |
686 | 3.0 | 130.0 | 64.0 | 17.0 | 230.0 | 23.1 | 0.314 | 22.0 | 0.0 |
687 | 1.0 | 107.0 | 50.0 | 19.0 | 96.0 | 28.3 | 0.181 | 29.0 | 0.0 |
688 | 1.0 | 140.0 | 74.0 | 26.0 | 180.0 | 24.1 | 0.828 | 23.0 | 0.0 |
689 | 1.0 | 144.0 | 82.0 | 46.0 | 180.0 | 46.1 | 0.335 | 46.0 | 1.0 |
690 | 8.0 | 107.0 | 80.0 | 21.0 | 91.0 | 24.6 | 0.856 | 34.0 | 0.0 |
691 | 13.0 | 158.0 | 114.0 | 34.0 | 206.0 | 42.3 | 0.257 | 44.0 | 1.0 |
692 | 2.0 | 121.0 | 70.0 | 32.0 | 95.0 | 39.1 | 0.886 | 23.0 | 0.0 |
693 | 7.0 | 129.0 | 68.0 | 49.0 | 125.0 | 38.5 | 0.439 | 43.0 | 1.0 |
694 | 2.0 | 90.0 | 60.0 | 14.0 | 52.0 | 23.5 | 0.191 | 25.0 | 0.0 |
695 | 7.0 | 142.0 | 90.0 | 24.0 | 480.0 | 30.4 | 0.128 | 43.0 | 1.0 |
696 | 3.0 | 169.0 | 74.0 | 19.0 | 125.0 | 29.9 | 0.268 | 31.0 | 1.0 |
697 | 0.0 | 99.0 | 63.0 | 15.0 | 106.0 | 25.0 | 0.253 | 22.0 | 0.0 |
698 | 4.0 | 127.0 | 88.0 | 11.0 | 155.0 | 34.5 | 0.598 | 28.0 | 0.0 |
699 | 4.0 | 118.0 | 70.0 | 34.0 | 123.0 | 44.5 | 0.904 | 26.0 | 0.0 |
700 | 2.0 | 122.0 | 76.0 | 27.0 | 200.0 | 35.9 | 0.483 | 26.0 | 0.0 |
701 | 6.0 | 125.0 | 78.0 | 31.0 | 282.0 | 27.6 | 0.565 | 49.0 | 1.0 |
702 | 1.0 | 168.0 | 88.0 | 29.0 | 216.0 | 35.0 | 0.905 | 52.0 | 1.0 |
703 | 2.0 | 129.0 | 80.0 | 30.0 | 159.0 | 38.5 | 0.304 | 41.0 | 0.0 |
704 | 4.0 | 110.0 | 76.0 | 20.0 | 100.0 | 28.4 | 0.118 | 27.0 | 0.0 |
705 | 6.0 | 80.0 | 80.0 | 36.0 | 75.0 | 39.8 | 0.177 | 28.0 | 0.0 |
706 | 10.0 | 115.0 | 81.0 | 29.0 | 155.0 | 36.4 | 0.261 | 30.0 | 1.0 |
707 | 2.0 | 127.0 | 46.0 | 21.0 | 335.0 | 34.4 | 0.176 | 22.0 | 0.0 |
708 | 9.0 | 164.0 | 78.0 | 29.0 | 160.0 | 32.8 | 0.148 | 45.0 | 1.0 |
709 | 2.0 | 93.0 | 64.0 | 32.0 | 160.0 | 38.0 | 0.674 | 23.0 | 1.0 |
710 | 3.0 | 158.0 | 64.0 | 13.0 | 387.0 | 31.2 | 0.295 | 24.0 | 0.0 |
711 | 5.0 | 126.0 | 78.0 | 27.0 | 22.0 | 29.6 | 0.439 | 40.0 | 0.0 |
712 | 10.0 | 129.0 | 62.0 | 36.0 | 130.0 | 41.2 | 0.441 | 38.0 | 1.0 |
713 | 0.0 | 134.0 | 58.0 | 20.0 | 291.0 | 26.4 | 0.352 | 21.0 | 0.0 |
714 | 3.0 | 102.0 | 74.0 | 25.0 | 97.0 | 29.5 | 0.121 | 32.0 | 0.0 |
715 | 7.0 | 187.0 | 50.0 | 33.0 | 392.0 | 33.9 | 0.826 | 34.0 | 1.0 |
716 | 3.0 | 173.0 | 78.0 | 39.0 | 185.0 | 33.8 | 0.970 | 31.0 | 1.0 |
717 | 10.0 | 94.0 | 72.0 | 18.0 | 77.0 | 23.1 | 0.595 | 56.0 | 0.0 |
718 | 1.0 | 108.0 | 60.0 | 46.0 | 178.0 | 35.5 | 0.415 | 24.0 | 0.0 |
719 | 5.0 | 97.0 | 76.0 | 27.0 | 193.0 | 35.6 | 0.378 | 52.0 | 1.0 |
720 | 4.0 | 83.0 | 86.0 | 19.0 | 74.0 | 29.3 | 0.317 | 34.0 | 0.0 |
721 | 1.0 | 114.0 | 66.0 | 36.0 | 200.0 | 38.1 | 0.289 | 21.0 | 0.0 |
722 | 1.0 | 149.0 | 68.0 | 29.0 | 127.0 | 29.3 | 0.349 | 42.0 | 1.0 |
723 | 5.0 | 117.0 | 86.0 | 30.0 | 105.0 | 39.1 | 0.251 | 42.0 | 0.0 |
724 | 1.0 | 111.0 | 94.0 | 26.0 | 128.0 | 32.8 | 0.265 | 45.0 | 0.0 |
725 | 4.0 | 112.0 | 78.0 | 40.0 | 127.0 | 39.4 | 0.236 | 38.0 | 0.0 |
726 | 1.0 | 116.0 | 78.0 | 29.0 | 180.0 | 36.1 | 0.496 | 25.0 | 0.0 |
727 | 0.0 | 141.0 | 84.0 | 26.0 | 247.0 | 32.4 | 0.433 | 22.0 | 0.0 |
728 | 2.0 | 175.0 | 88.0 | 25.0 | 87.0 | 22.9 | 0.326 | 22.0 | 0.0 |
729 | 2.0 | 92.0 | 52.0 | 23.0 | 84.0 | 30.1 | 0.141 | 22.0 | 0.0 |
730 | 3.0 | 130.0 | 78.0 | 23.0 | 79.0 | 28.4 | 0.323 | 34.0 | 1.0 |
731 | 8.0 | 120.0 | 86.0 | 35.0 | 148.0 | 28.4 | 0.259 | 22.0 | 1.0 |
732 | 2.0 | 174.0 | 88.0 | 37.0 | 120.0 | 44.5 | 0.646 | 24.0 | 1.0 |
733 | 2.0 | 106.0 | 56.0 | 27.0 | 165.0 | 29.0 | 0.426 | 22.0 | 0.0 |
734 | 2.0 | 105.0 | 75.0 | 25.0 | 158.0 | 23.3 | 0.560 | 53.0 | 0.0 |
735 | 4.0 | 95.0 | 60.0 | 32.0 | 96.0 | 35.4 | 0.284 | 28.0 | 0.0 |
736 | 0.0 | 126.0 | 86.0 | 27.0 | 120.0 | 27.4 | 0.515 | 21.0 | 0.0 |
737 | 8.0 | 65.0 | 72.0 | 23.0 | 58.0 | 32.0 | 0.600 | 42.0 | 0.0 |
738 | 2.0 | 99.0 | 60.0 | 17.0 | 160.0 | 36.6 | 0.453 | 21.0 | 0.0 |
739 | 1.0 | 102.0 | 74.0 | 38.0 | 184.0 | 39.5 | 0.293 | 42.0 | 1.0 |
740 | 11.0 | 120.0 | 80.0 | 37.0 | 150.0 | 42.3 | 0.785 | 48.0 | 1.0 |
741 | 3.0 | 102.0 | 44.0 | 20.0 | 94.0 | 30.8 | 0.400 | 26.0 | 0.0 |
742 | 1.0 | 109.0 | 58.0 | 18.0 | 116.0 | 28.5 | 0.219 | 22.0 | 0.0 |
743 | 9.0 | 140.0 | 94.0 | 30.0 | 264.0 | 32.7 | 0.734 | 45.0 | 1.0 |
744 | 13.0 | 153.0 | 88.0 | 37.0 | 140.0 | 40.6 | 1.174 | 39.0 | 0.0 |
745 | 12.0 | 100.0 | 84.0 | 33.0 | 105.0 | 30.0 | 0.488 | 46.0 | 0.0 |
746 | 1.0 | 147.0 | 94.0 | 41.0 | 169.0 | 49.3 | 0.358 | 27.0 | 1.0 |
747 | 1.0 | 81.0 | 74.0 | 41.0 | 57.0 | 46.3 | 1.096 | 32.0 | 0.0 |
748 | 3.0 | 187.0 | 70.0 | 22.0 | 200.0 | 36.4 | 0.408 | 36.0 | 1.0 |
749 | 6.0 | 162.0 | 62.0 | 23.0 | 267.0 | 24.3 | 0.178 | 50.0 | 1.0 |
750 | 4.0 | 136.0 | 70.0 | 27.0 | 167.0 | 31.2 | 1.182 | 22.0 | 1.0 |
751 | 1.0 | 121.0 | 78.0 | 39.0 | 74.0 | 39.0 | 0.261 | 28.0 | 0.0 |
752 | 3.0 | 108.0 | 62.0 | 24.0 | 139.0 | 26.0 | 0.223 | 25.0 | 0.0 |
753 | 0.0 | 181.0 | 88.0 | 44.0 | 510.0 | 43.3 | 0.222 | 26.0 | 1.0 |
754 | 8.0 | 154.0 | 78.0 | 32.0 | 144.0 | 32.4 | 0.443 | 45.0 | 1.0 |
755 | 1.0 | 128.0 | 88.0 | 39.0 | 110.0 | 36.5 | 1.057 | 37.0 | 1.0 |
756 | 7.0 | 137.0 | 90.0 | 41.0 | 163.0 | 32.0 | 0.391 | 39.0 | 0.0 |
757 | 0.0 | 123.0 | 72.0 | 24.0 | 310.0 | 36.3 | 0.258 | 52.0 | 1.0 |
758 | 1.0 | 106.0 | 76.0 | 33.0 | 172.0 | 37.5 | 0.197 | 26.0 | 0.0 |
759 | 6.0 | 190.0 | 92.0 | 28.0 | 257.0 | 35.5 | 0.278 | 66.0 | 1.0 |
760 | 2.0 | 88.0 | 58.0 | 26.0 | 16.0 | 28.4 | 0.766 | 22.0 | 0.0 |
761 | 9.0 | 170.0 | 74.0 | 31.0 | 254.0 | 44.0 | 0.403 | 43.0 | 1.0 |
762 | 9.0 | 89.0 | 62.0 | 17.0 | 42.0 | 22.5 | 0.142 | 33.0 | 0.0 |
763 | 10.0 | 101.0 | 76.0 | 48.0 | 180.0 | 32.9 | 0.171 | 63.0 | 0.0 |
764 | 2.0 | 122.0 | 70.0 | 27.0 | 143.0 | 36.8 | 0.340 | 27.0 | 0.0 |
765 | 5.0 | 121.0 | 72.0 | 23.0 | 112.0 | 26.2 | 0.245 | 30.0 | 0.0 |
766 | 1.0 | 126.0 | 60.0 | 35.0 | 121.0 | 30.1 | 0.349 | 47.0 | 1.0 |
767 | 1.0 | 93.0 | 70.0 | 31.0 | 71.0 | 30.4 | 0.315 | 23.0 | 0.0 |
# Run the experiment pipeline on the raw (un-imputed) dataset.
# NOTE(review): runNoknn is defined elsewhere in this file — presumably the
# variant that skips the per-class KNN imputation step (praproses with knn != 1);
# confirm against its definition. The bare expression on the next line is a
# notebook-style echo that displays the resulting DataFrame.
dnoknn = runNoknn(raw_data)
dnoknn
Pregnancies | Glucose | BloodPressure | SkinThickness | Insulin | BMI | DiabetesPedigreeFunction | Age | Outcome | |
---|---|---|---|---|---|---|---|---|---|
0 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
1 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
2 | 7 | 128 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 1 |
3 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
4 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
5 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
6 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
7 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
8 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
9 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
10 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
11 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
12 | 7 | 128 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
13 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
14 | 6 | 128 | 69 | 31 | 121 | 27.8 | 1.527 | 29 | 1 |
15 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
16 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
17 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
18 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
19 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
20 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
21 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
22 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
23 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
24 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
25 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
26 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
27 | 6 | 127 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
28 | 7 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
29 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
30 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
31 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
32 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
33 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
34 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
35 | 6 | 127 | 68 | 32 | 121 | 27.8 | 1.527 | 29 | 0 |
36 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
37 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
38 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
39 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
40 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
41 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
42 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
43 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
44 | 7 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
45 | 6 | 128 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
46 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
47 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
48 | 7 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
49 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
50 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
51 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
52 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
53 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
54 | 7 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
55 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
56 | 7 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
57 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
58 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
59 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
60 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
61 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
62 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
63 | 6 | 128 | 68 | 32 | 121 | 27.8 | 1.527 | 28 | 0 |
64 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
65 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
66 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
67 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
68 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
69 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
70 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
71 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
72 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
73 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
74 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
75 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
76 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
77 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
78 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
79 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
80 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
81 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
82 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
83 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
84 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
85 | 6 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
86 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
87 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
88 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
89 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
90 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
91 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
92 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
93 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
94 | 6 | 128 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
95 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
96 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
97 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
98 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
99 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
100 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
101 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
102 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
103 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
104 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
105 | 6 | 127 | 68 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
106 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
107 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
108 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
109 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
110 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
111 | 7 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
112 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
113 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
114 | 7 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
115 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
116 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
117 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
118 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
119 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
120 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
121 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
122 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
123 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
124 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
125 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
126 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
127 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
128 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
129 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
130 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
131 | 7 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
132 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
133 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
134 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
135 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
136 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
137 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
138 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
139 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
140 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
141 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
142 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
143 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
144 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
145 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
146 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
147 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
148 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
149 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
150 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
151 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
152 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
153 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
154 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
155 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
156 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
157 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
158 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
159 | 7 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
160 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
161 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
162 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
163 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
164 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
165 | 6 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
166 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
167 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
168 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
169 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
170 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
171 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
172 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
173 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
174 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
175 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
176 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
177 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
178 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
179 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
180 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
181 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
182 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
183 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
184 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
185 | 7 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
186 | 7 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
187 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
188 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
189 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
190 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
191 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
192 | 7 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
193 | 7 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
194 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
195 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
196 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
197 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 1 |
198 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
199 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
200 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
201 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
202 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
203 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
204 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
205 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
206 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
207 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
208 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
209 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
210 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
211 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
212 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
213 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 1 |
214 | 7 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
215 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
216 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
217 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
218 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
219 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
220 | 6 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 28 | 1 |
221 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
222 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
223 | 7 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
224 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
225 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
226 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
227 | 6 | 128 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
228 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
229 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
230 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
231 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
232 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
233 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
234 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
235 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
236 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
237 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
238 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
239 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
240 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
241 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
242 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 1 |
243 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 29 | 1 |
244 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
245 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
246 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
247 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
248 | 7 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
249 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
250 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
251 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
252 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
253 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
254 | 7 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 29 | 1 |
255 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
256 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
257 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
258 | 6 | 128 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
259 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
260 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
261 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
262 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
263 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
264 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
265 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
266 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
267 | 6 | 128 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
268 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
269 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 1 |
270 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
271 | 6 | 127 | 68 | 32 | 120 | 27.8 | 0.527 | 28 | 0 |
272 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
273 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
274 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
275 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
276 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
277 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
278 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
279 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
280 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
281 | 7 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
282 | 7 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
283 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
284 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
285 | 7 | 128 | 69 | 31 | 121 | 27.8 | 1.527 | 29 | 0 |
286 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
287 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
288 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
289 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
290 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
291 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
292 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
293 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
294 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
295 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
296 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
297 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
298 | 7 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
299 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
300 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
301 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
302 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
303 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
304 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
305 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
306 | 7 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 29 | 1 |
307 | 6 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
308 | 6 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 28 | 1 |
309 | 6 | 127 | 68 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
310 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
311 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
312 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 1 |
313 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
314 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
315 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
316 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
317 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
318 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
319 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
320 | 6 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 29 | 0 |
321 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
322 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
323 | 7 | 128 | 69 | 32 | 120 | 27.8 | 1.527 | 29 | 1 |
324 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
325 | 6 | 128 | 69 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
326 | 6 | 127 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
327 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
328 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
329 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
330 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
331 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
332 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
333 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
334 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
335 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
336 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
337 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
338 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
339 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
340 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
341 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
342 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
343 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
344 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
345 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
346 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
347 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
348 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
349 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
350 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
351 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
352 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
353 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
354 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
355 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
356 | 6 | 127 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
357 | 7 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
358 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
359 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
360 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
361 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
362 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
363 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
364 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
365 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
366 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
367 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
368 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
369 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
370 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
371 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
372 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
373 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
374 | 6 | 127 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
375 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
376 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
377 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
378 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
379 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
380 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
381 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
382 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
383 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
384 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
385 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
386 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
387 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
388 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
389 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
390 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
391 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
392 | 6 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
393 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
394 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
395 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
396 | 6 | 127 | 68 | 32 | 120 | 27.8 | 1.527 | 29 | 0 |
397 | 6 | 128 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
398 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
399 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
400 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
401 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
402 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
403 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
404 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
405 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
406 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
407 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
408 | 7 | 128 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 1 |
409 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
410 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
411 | 6 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
412 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
413 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
414 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
415 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
416 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
417 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
418 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
419 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 1 |
420 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
421 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
422 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
423 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
424 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
425 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
426 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
427 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
428 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
429 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
430 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
431 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
432 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
433 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
434 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
435 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
436 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
437 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
438 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
439 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
440 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
441 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
442 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
443 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
444 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
445 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
446 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
447 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
448 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
449 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
450 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
451 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
452 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
453 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
454 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
455 | 7 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
456 | 6 | 128 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
457 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
458 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
459 | 7 | 128 | 69 | 32 | 120 | 27.8 | 0.527 | 29 | 0 |
460 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
461 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
462 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
463 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
464 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
465 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
466 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
467 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
468 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
469 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
470 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
471 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
472 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
473 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
474 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
475 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
476 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
477 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
478 | 7 | 127 | 69 | 32 | 120 | 27.8 | 0.527 | 29 | 0 |
479 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
480 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
481 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
482 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
483 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
484 | 6 | 128 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
485 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
486 | 6 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
487 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
488 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
489 | 7 | 128 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
490 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
491 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
492 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
493 | 6 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
494 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
495 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
496 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
497 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
498 | 7 | 128 | 69 | 32 | 121 | 27.8 | 0.527 | 29 | 1 |
499 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
500 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
501 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
502 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
503 | 7 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
504 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
505 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
506 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
507 | 6 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
508 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
509 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
510 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
511 | 6 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
512 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
513 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
514 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
515 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
516 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
517 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
518 | 7 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
519 | 6 | 128 | 69 | 31 | 121 | 27.8 | 1.527 | 29 | 0 |
520 | 6 | 127 | 69 | 32 | 120 | 27.8 | 0.527 | 28 | 0 |
521 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
522 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
523 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
524 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
525 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
526 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
527 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
528 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
529 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
530 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
531 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
532 | 6 | 127 | 68 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
533 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
534 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
535 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
536 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
537 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
538 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
539 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
540 | 7 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
541 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 1 |
542 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
543 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
544 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
545 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
546 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
547 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
548 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
549 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
550 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
551 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
552 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
553 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
554 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
555 | 7 | 127 | 69 | 32 | 121 | 27.8 | 0.527 | 29 | 0 |
556 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
557 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
558 | 7 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
559 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
560 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
561 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
562 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
563 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
564 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
565 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
566 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
567 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
568 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 0 |
569 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
570 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
571 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
572 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
573 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
574 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
575 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
576 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 29 | 0 |
577 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
578 | 7 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
579 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
580 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
581 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
582 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
583 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
584 | 7 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
585 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
586 | 7 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
587 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
588 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
589 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
590 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
591 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
592 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
593 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
594 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
595 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 1 |
596 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
597 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
598 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
599 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
600 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
601 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
602 | 6 | 127 | 69 | 32 | 120 | 27.8 | 0.527 | 29 | 0 |
603 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
604 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
605 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
606 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
607 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
608 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
609 | 6 | 127 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
610 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
611 | 6 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
612 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
613 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
614 | 7 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 29 | 1 |
615 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
616 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
617 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
618 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
619 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
620 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
621 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
622 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
623 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
624 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
625 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
626 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
627 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
628 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
629 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
630 | 7 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 29 | 1 |
631 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
632 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
633 | 6 | 128 | 69 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
634 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
635 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
636 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
637 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
638 | 7 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
639 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
640 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
641 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
642 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
643 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
644 | 6 | 127 | 69 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
645 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
646 | 6 | 128 | 69 | 31 | 121 | 27.8 | 0.527 | 29 | 1 |
647 | 6 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
648 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
649 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
650 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
651 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
652 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
653 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
654 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
655 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 1 |
656 | 6 | 127 | 68 | 32 | 120 | 27.8 | 0.527 | 28 | 0 |
657 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
658 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
659 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
660 | 7 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
661 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
662 | 7 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
663 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
664 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
665 | 6 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
666 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
667 | 7 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
668 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
669 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
670 | 6 | 128 | 68 | 31 | 121 | 28.8 | 1.527 | 29 | 0 |
671 | 6 | 127 | 68 | 31 | 120 | 27.8 | 1.527 | 28 | 0 |
672 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
673 | 6 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 28 | 0 |
674 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
675 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
676 | 7 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
677 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
678 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
679 | 6 | 127 | 68 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
680 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
681 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
682 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
683 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
684 | 6 | 128 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
685 | 6 | 128 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
686 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
687 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
688 | 6 | 128 | 69 | 31 | 121 | 27.8 | 1.527 | 28 | 0 |
689 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
690 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
691 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
692 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 0 |
693 | 7 | 128 | 68 | 32 | 121 | 28.8 | 0.527 | 29 | 1 |
694 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
695 | 7 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
696 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
697 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
698 | 6 | 127 | 69 | 31 | 121 | 28.8 | 1.527 | 28 | 0 |
699 | 6 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
700 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
701 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 1 |
702 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
703 | 6 | 128 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
704 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
705 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
706 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
707 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
708 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
709 | 6 | 127 | 68 | 32 | 121 | 28.8 | 1.527 | 28 | 1 |
710 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
711 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
712 | 7 | 128 | 68 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
713 | 6 | 128 | 68 | 31 | 121 | 27.8 | 0.527 | 28 | 0 |
714 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
715 | 7 | 128 | 68 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
716 | 6 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
717 | 7 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
718 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
719 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
720 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
721 | 6 | 127 | 68 | 32 | 121 | 28.8 | 0.527 | 28 | 0 |
722 | 6 | 128 | 68 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
723 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
724 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 0 |
725 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
726 | 6 | 127 | 69 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
727 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
728 | 6 | 128 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
729 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
730 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
731 | 7 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 1 |
732 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 28 | 1 |
733 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
734 | 6 | 127 | 69 | 31 | 120 | 27.8 | 1.527 | 29 | 0 |
735 | 6 | 127 | 68 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
736 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
737 | 7 | 127 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 0 |
738 | 6 | 127 | 68 | 31 | 121 | 28.8 | 0.527 | 28 | 0 |
739 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
740 | 7 | 127 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 1 |
741 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
742 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
743 | 7 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 29 | 1 |
744 | 7 | 128 | 69 | 32 | 121 | 28.8 | 1.527 | 29 | 0 |
745 | 7 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
746 | 6 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 1 |
747 | 6 | 127 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 0 |
748 | 6 | 128 | 69 | 31 | 121 | 28.8 | 0.527 | 29 | 1 |
749 | 6 | 128 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 1 |
750 | 6 | 128 | 69 | 31 | 120 | 28.8 | 1.527 | 28 | 1 |
751 | 6 | 127 | 69 | 32 | 120 | 28.8 | 0.527 | 28 | 0 |
752 | 6 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 28 | 0 |
753 | 6 | 128 | 69 | 32 | 121 | 28.8 | 0.527 | 28 | 1 |
754 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 1 |
755 | 6 | 128 | 69 | 32 | 120 | 28.8 | 1.527 | 29 | 1 |
756 | 7 | 128 | 69 | 32 | 120 | 28.8 | 0.527 | 29 | 0 |
757 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
758 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
759 | 6 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
760 | 6 | 127 | 68 | 31 | 120 | 28.8 | 1.527 | 28 | 0 |
761 | 7 | 128 | 69 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
762 | 7 | 127 | 68 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
763 | 7 | 127 | 69 | 32 | 121 | 28.8 | 0.527 | 29 | 0 |
764 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
765 | 6 | 127 | 69 | 31 | 120 | 27.8 | 0.527 | 29 | 0 |
766 | 6 | 127 | 68 | 31 | 120 | 28.8 | 0.527 | 29 | 1 |
767 | 6 | 127 | 69 | 31 | 120 | 28.8 | 0.527 | 28 | 0 |
# Run the full decision-tree pipeline on the raw dataset.
# `runAll` and `raw_data` are defined in earlier notebook cells (not visible here).
# NOTE(review): the printed result below looks like a dict of
# {feature: (score, split_value)} — presumably information gain and the best
# split threshold per attribute; confirm against runAll's definition.
dwithknn = runAll(raw_data)
dwithknn  # bare expression: notebook-style display of the result
{'Pregnancies': (0.039180260954807755, 6.0), 'Glucose': (0.13518841941127935, 127.0), 'BloodPressure': (0.018731718578645196, 63.0), 'SkinThickness': (0.07250945002996534, 23.0), 'Insulin': (0.14435516834581175, 108.0), 'BMI': (0.07469977982812326, 27.8), 'DiabetesPedigreeFunction': (0.02079639637723707, 0.527), 'Age': (0.0724727100056407, 28.0)}
Pregnancies | Glucose | BloodPressure | SkinThickness | Insulin | BMI | DiabetesPedigreeFunction | Age | Outcome | |
---|---|---|---|---|---|---|---|---|---|
0 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
1 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
2 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
3 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
4 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
5 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
6 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
7 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
8 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
9 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
10 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
11 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
12 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
13 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
14 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
15 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
16 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
17 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
18 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
19 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 1.0 |
20 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
21 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
22 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
23 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
24 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
25 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
26 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
27 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
28 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
29 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
30 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
31 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
32 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
33 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
34 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
35 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
36 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
37 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
38 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
39 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
40 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
41 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
42 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
43 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
44 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
45 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
46 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
47 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
48 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
49 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
50 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
51 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
52 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
53 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
54 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
55 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
56 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
57 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
58 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
59 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
60 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
61 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
62 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
63 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
64 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
65 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
66 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
67 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
68 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
69 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
70 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
71 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
72 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
73 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
74 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
75 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
76 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
77 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
78 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
79 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
80 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
81 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
82 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
83 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
84 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
85 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
86 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
87 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
88 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
89 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
90 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
91 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
92 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
93 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
94 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
95 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
96 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
97 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
98 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
99 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
100 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
101 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
102 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
103 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
104 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
105 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
106 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
107 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
108 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
109 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
110 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
111 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
112 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
113 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
114 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
115 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
116 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
117 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
118 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
119 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
120 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
121 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
122 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
123 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
124 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
125 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
126 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
127 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
128 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
129 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
130 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
131 | 7.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
132 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
133 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
134 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
135 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
136 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
137 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
138 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
139 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
140 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
141 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
142 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
143 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
144 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
145 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
146 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
147 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
148 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
149 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
150 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
151 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
152 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
153 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
154 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
155 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
156 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
157 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
158 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
159 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
160 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
161 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
162 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
163 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
164 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
165 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
166 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
167 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
168 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
169 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
170 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
171 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
172 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
173 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
174 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
175 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
176 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
177 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
178 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
179 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
180 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
181 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
182 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
183 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
184 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
185 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
186 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
187 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 1.0 |
188 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
189 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
190 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
191 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
192 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
193 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
194 | 7.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
195 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
196 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
197 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 1.0 |
198 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
199 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
200 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
201 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
202 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
203 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
204 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
205 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
206 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
207 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
208 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
209 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
210 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
211 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
212 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
213 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
214 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
215 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
216 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
217 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
218 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
219 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
220 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
221 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
222 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
223 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
224 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
225 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
226 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
227 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
228 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
229 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
230 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
231 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
232 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
233 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
234 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
235 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
236 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
237 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
238 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
239 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
240 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
241 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
242 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 1.0 |
243 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
244 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
245 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
246 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
247 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
248 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
249 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
250 | 7.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
251 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
252 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
253 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
254 | 7.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
255 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
256 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
257 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
258 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
259 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
260 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
261 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
262 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
263 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
264 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
265 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
266 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
267 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
268 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
269 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 1.0 |
270 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
271 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
272 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
273 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
274 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
275 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
276 | 7.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 1.0 |
277 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
278 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
279 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
280 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
281 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
282 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
283 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
284 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
285 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
286 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
287 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
288 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
289 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
290 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
291 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
292 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
293 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
294 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
295 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
296 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
297 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
298 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
299 | 7.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
300 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
301 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
302 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
303 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
304 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
305 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
306 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
307 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
308 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
309 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
310 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
311 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
312 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 1.0 |
313 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
314 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
315 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
316 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
317 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
318 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
319 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
320 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
321 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
322 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
323 | 7.0 | 128.0 | 64.0 | 24.0 | 108.0 | 27.8 | 1.527 | 29.0 | 1.0 |
324 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
325 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
326 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
327 | 7.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
328 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
329 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
330 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
331 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
332 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
333 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
334 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
335 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
336 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
337 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
338 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
339 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
340 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
341 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
342 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
343 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
344 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
345 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
346 | 6.0 | 128.0 | 63.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
347 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
348 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
349 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
350 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
351 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
352 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
353 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
354 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
355 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
356 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
357 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
358 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
359 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
360 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
361 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
362 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
363 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
364 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
365 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
366 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
367 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
368 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
369 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
370 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
371 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
372 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
373 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
374 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
375 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
376 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
377 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
378 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
379 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
380 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
381 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
382 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
383 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
384 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
385 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
386 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
387 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
388 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
389 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
390 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
391 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
392 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
393 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
394 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
395 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
396 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
397 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
398 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
399 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
400 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
401 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
402 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
403 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
404 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
405 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
406 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
407 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
408 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
409 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
410 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
411 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
412 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
413 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
414 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
415 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
416 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
417 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
418 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
419 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 1.0 |
420 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
421 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
422 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
423 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
424 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
425 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
426 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
427 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
428 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
429 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
430 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
431 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
432 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
433 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
434 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
435 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
436 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
437 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
438 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
439 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
440 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
441 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
442 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
443 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
444 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
445 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
446 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
447 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
448 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
449 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
450 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
451 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
452 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
453 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
454 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
455 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
456 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
457 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
458 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
459 | 7.0 | 128.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
460 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
461 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
462 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
463 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
464 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
465 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
466 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
467 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
468 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
469 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
470 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
471 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
472 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
473 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
474 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
475 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
476 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
477 | 7.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
478 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
479 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
480 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
481 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
482 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
483 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
484 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
485 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
486 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
487 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
488 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
489 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
490 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
491 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
492 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
493 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
494 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
495 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
496 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
497 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
498 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
499 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
500 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
501 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
502 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
503 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
504 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
505 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
506 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
507 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
508 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
509 | 7.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
510 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
511 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
512 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
513 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
514 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
515 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 1.0 |
516 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
517 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
518 | 7.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
519 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
520 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
521 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
522 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
523 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
524 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
525 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
526 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
527 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
528 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
529 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
530 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
531 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
532 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
533 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
534 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
535 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
536 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
537 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
538 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
539 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
540 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
541 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
542 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
543 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
544 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
545 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
546 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
547 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
548 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
549 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
550 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
551 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
552 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
553 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
554 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
555 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
556 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
557 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
558 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
559 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
560 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
561 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
562 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
563 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
564 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
565 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
566 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
567 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
568 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
569 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
570 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
571 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
572 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
573 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
574 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
575 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
576 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
577 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
578 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
579 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
580 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
581 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
582 | 7.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
583 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
584 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
585 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
586 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
587 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
588 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
589 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
590 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
591 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
592 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
593 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
594 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
595 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
596 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
597 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
598 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
599 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
600 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
601 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
602 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
603 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
604 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
605 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
606 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
607 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
608 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
609 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
610 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
611 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
612 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
613 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
614 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
615 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
616 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
617 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
618 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
619 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
620 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
621 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 28.0 | 0.0 |
622 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
623 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
624 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
625 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
626 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
627 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
628 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
629 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
630 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
631 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
632 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
633 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
634 | 7.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
635 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
636 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
637 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
638 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 1.0 |
639 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
640 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
641 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
642 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
643 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
644 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
645 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
646 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
647 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
648 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
649 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
650 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
651 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
652 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
653 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
654 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
655 | 6.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
656 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
657 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
658 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
659 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 1.0 |
660 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
661 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
662 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
663 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
664 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
665 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
666 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
667 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
668 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
669 | 7.0 | 128.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
670 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
671 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
672 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
673 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
674 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
675 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
676 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
677 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
678 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
679 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
680 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
681 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
682 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
683 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
684 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
685 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
686 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
687 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
688 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 28.0 | 0.0 |
689 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
690 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
691 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
692 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
693 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
694 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
695 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
696 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
697 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
698 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
699 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 0.0 |
700 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
701 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 1.0 |
702 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
703 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
704 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
705 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
706 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
707 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
708 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
709 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
710 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
711 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
712 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
713 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
714 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
715 | 7.0 | 128.0 | 63.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
716 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
717 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 27.8 | 1.527 | 29.0 | 0.0 |
718 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
719 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
720 | 6.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
721 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
722 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
723 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
724 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
725 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
726 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
727 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
728 | 6.0 | 128.0 | 64.0 | 24.0 | 108.0 | 27.8 | 0.527 | 28.0 | 0.0 |
729 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
730 | 6.0 | 128.0 | 64.0 | 23.0 | 108.0 | 28.8 | 0.527 | 29.0 | 1.0 |
731 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
732 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
733 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
734 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 1.527 | 29.0 | 0.0 |
735 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
736 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
737 | 7.0 | 127.0 | 64.0 | 23.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
738 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
739 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
740 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
741 | 6.0 | 127.0 | 63.0 | 23.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
742 | 6.0 | 127.0 | 63.0 | 23.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
743 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
744 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 0.0 |
745 | 7.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 29.0 | 0.0 |
746 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
747 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 1.527 | 29.0 | 0.0 |
748 | 6.0 | 128.0 | 64.0 | 23.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
749 | 6.0 | 128.0 | 63.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 1.0 |
750 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 28.0 | 1.0 |
751 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
752 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 27.8 | 0.527 | 28.0 | 0.0 |
753 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 1.0 |
754 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
755 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 1.527 | 29.0 | 1.0 |
756 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
757 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
758 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
759 | 6.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
760 | 6.0 | 127.0 | 63.0 | 24.0 | 108.0 | 28.8 | 1.527 | 28.0 | 0.0 |
761 | 7.0 | 128.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
762 | 7.0 | 127.0 | 63.0 | 23.0 | 108.0 | 27.8 | 0.527 | 29.0 | 0.0 |
763 | 7.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 0.0 |
764 | 6.0 | 127.0 | 64.0 | 24.0 | 109.0 | 28.8 | 0.527 | 28.0 | 0.0 |
765 | 6.0 | 127.0 | 64.0 | 23.0 | 109.0 | 27.8 | 0.527 | 29.0 | 0.0 |
766 | 6.0 | 127.0 | 63.0 | 24.0 | 109.0 | 28.8 | 0.527 | 29.0 | 1.0 |
767 | 6.0 | 127.0 | 64.0 | 24.0 | 108.0 | 28.8 | 0.527 | 28.0 | 0.0 |
def calc_total_entropy(train_data, label, class_list, t):
    """Weighted Shannon entropy (base 2) of the whole dataset.

    train_data : DataFrame containing the label column and a weight column.
    label      : name of the class/label column.
    class_list : iterable of the distinct class values.
    t          : name of the instance-weight column (weights replace row counts).

    Returns the entropy of the class distribution where each row contributes
    its weight instead of 1.
    """
    total_row = train_data[t].sum()  # total weight of the dataset
    total_entr = 0.0
    for c in class_list:
        # weight of the rows in class c; select the column first instead of
        # summing every column of the filtered frame (cheaper, same value)
        total_class_count = train_data.loc[train_data[label] == c, t].sum()
        # fix: skip empty classes — the original computed 0*log2(0), which is
        # NaN in numpy and poisoned the whole entropy
        if total_class_count > 0:
            p = total_class_count / total_row
            total_entr += -p * np.log2(p)
    return total_entr
def calc_entropy(feature_value_data, label, class_list, t):
    """Weighted Shannon entropy (base 2) of one feature-value partition.

    feature_value_data : rows sharing a single value of some feature.
    label              : name of the class column; class_list: its values.
    t                  : name of the instance-weight column.
    """
    total_weight = feature_value_data[t].sum()
    result = 0.0
    for klass in class_list:
        members = feature_value_data[feature_value_data[label] == klass]
        klass_weight = members.sum()[t]
        # classes with zero weight contribute nothing (lim p->0 of -p*log2 p)
        if klass_weight != 0:
            p = klass_weight / total_weight
            result -= p * np.log2(p)
    return result
def calc_info_gain(feature_name, train_data, label, class_list, t):
    """Information gain of `feature_name`: dataset entropy minus the
    weight-averaged entropy of the feature's value partitions."""
    total_weight = train_data[t].sum()
    weighted_entropy = 0.0
    for value in train_data[feature_name].unique():
        partition = train_data[train_data[feature_name] == value]
        share = partition[t].sum() / total_weight  # fraction of total weight
        weighted_entropy += share * calc_entropy(partition, label, class_list, t)
    return calc_total_entropy(train_data, label, class_list, t) - weighted_entropy
def calc_sp_info(feature_name, train_data, label, class_list, t):
    """Split information of `feature_name` — the denominator of C4.5's
    gain ratio: the entropy of the feature's own value distribution.

    label and class_list are unused but kept so the signature matches the
    sibling calc_* functions. t names the instance-weight column.
    """
    total_row = train_data[t].sum()
    feature_info = 0.0
    for feature_value in train_data[feature_name].unique():
        feature_value_data = train_data[train_data[feature_name] == feature_value]
        probability = feature_value_data[t].sum() / total_row
        # fix: guard zero-weight partitions — log2(0) would yield NaN
        if probability > 0:
            feature_info += -probability * np.log2(probability)
    return feature_info
def find_most_informative_feature(train_data, label, class_list, t):
    """Return the feature with the highest gain ratio (C4.5 criterion),
    or None when no feature beats the initial -1 sentinel."""
    feature_list = ['Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness',
                    'Insulin', 'BMI', 'DiabetesPedigreeFunction', 'Age']
    max_gn_rat = -1
    max_info_feature = None
    for feature in feature_list:
        feature_info_gain = calc_info_gain(feature, train_data, label, class_list, t)
        feature_sp_info = calc_sp_info(feature, train_data, label, class_list, t)
        try:
            gn_rat = feature_info_gain.item() / feature_sp_info.item()
        except (ZeroDivisionError, AttributeError):
            # fix: the original bare `except:` hid every error. Only the two
            # expected failures fall back to -1: a zero split info (feature
            # has one value left) or plain-float results lacking .item().
            gn_rat = -1
        if max_gn_rat < gn_rat:
            max_gn_rat = gn_rat
            max_info_feature = feature
    return max_info_feature
def generate_sub_tree(feature_name, train_data, label, class_list, t, cek=None):
    """Build one C4.5 tree node.

    Maps every value of `feature_name` to either a class label (pure
    partition, forced leaf, or repeated feature) or "?" (branch still to be
    expanded by make_tree). class_list must hold exactly two class values;
    t names the instance-weight column.

    Returns (tree, train_data) where train_data has the rows consumed by
    leaf branches removed.
    """
    if feature_name == None:
        feature_name = cek  # fall back to the previously-used feature
    # total weight per distinct feature value
    xcv = {}
    for tmp in train_data[feature_name].unique():
        ss = train_data[train_data[feature_name] == tmp].sum()[t]
        xcv[tmp] = ss
    feature_value_count_dict = pd.Series(xcv)
    # force leaves when at most one feature still varies (nothing left to split on)
    f_left = 8
    l_node = False
    cf = ['Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness', 'Insulin', 'BMI', 'DiabetesPedigreeFunction', 'Age']
    for f in cf:
        panj = len(train_data[f].unique())
        if panj == 1:
            f_left -= 1
    if f_left <= 1:
        l_node = True
    tree = {}  # sub tree / node being built
    cek_fitur = cek
    # fix: .items() replaces Series.iteritems(), which was removed in pandas 2.0
    for feature_value, count in feature_value_count_dict.items():
        feature_value_data = train_data[train_data[feature_name] == feature_value]
        assigned_to_node = False  # did this branch become a leaf?
        # per-class weights inside this partition, in class_list order
        cek_count = []
        cek_class = []
        for cek in class_list:
            class_count = feature_value_data[feature_value_data[label] == cek].sum()[t]
            cek_count.append(class_count)
            cek_class.append(cek)
        for c in class_list:
            class_count = feature_value_data[feature_value_data[label] == c].sum()[t]
            if l_node == True:
                # forced leaf: the class with the larger weight wins
                if cek_count[1] > cek_count[0]:
                    tree[feature_value] = float(cek_class[1])
                    train_data = train_data[train_data[feature_name] != feature_value]
                    assigned_to_node = True
                else:
                    tree[feature_value] = float(cek_class[0])
                    train_data = train_data[train_data[feature_name] != feature_value]
                    assigned_to_node = True
            elif cek_fitur != feature_name:
                # pure partition (all weight in one class) -> leaf
                if class_count == count:
                    tree[feature_value] = c
                    train_data = train_data[train_data[feature_name] != feature_value]
                    assigned_to_node = True
            elif cek_fitur == feature_name:
                # same feature chosen twice in a row: stop and take majority
                if cek_count[1] > cek_count[0]:
                    tree[feature_value] = float(cek_class[1])
                    train_data = train_data[train_data[feature_name] != feature_value]
                    assigned_to_node = True
                else:
                    tree[feature_value] = float(cek_class[0])
                    train_data = train_data[train_data[feature_name] != feature_value]
                    assigned_to_node = True
        if not assigned_to_node:
            tree[feature_value] = "?"  # mark the branch for later expansion
    return tree, train_data
def make_tree(root, prev_feature_value, train_data, label, class_list, t, cek_fitur=None):
    # Recursively grow the C4.5 tree in place.
    # root: the dict node being filled; prev_feature_value: the branch value
    # that led here (None at the top level); t: name of the weight column;
    # cek_fitur: feature used by the parent node (lets generate_sub_tree
    # detect the same feature being chosen twice).
    if train_data[t].sum() != 0:  # stop once the remaining weight is zero (dataset exhausted)
        max_info_feature = find_most_informative_feature(train_data, label, class_list, t)  # best feature by gain ratio
        tree, train_data = generate_sub_tree(max_info_feature, train_data, label, class_list, t, cek_fitur)
        next_root = None
        if max_info_feature == None:
            max_info_feature = cek_fitur  # no feature qualified: reuse the parent's
        if prev_feature_value != None:  # attach as an intermediate node under the branch value
            root[prev_feature_value] = dict()
            root[prev_feature_value][max_info_feature] = tree
            next_root = root[prev_feature_value][max_info_feature]
        else:  # attach at the root of the tree
            root[max_info_feature] = tree
            next_root = root[max_info_feature]
        for node, branch in list(next_root.items()):  # walk the freshly-built node
            if branch == "?":  # unresolved branch: expand it
                feature_value_data = train_data[train_data[max_info_feature] == node]  # rows left for this branch (leaf rows already removed)
                cek_fitur = max_info_feature
                make_tree(next_root, node, feature_value_data, label, class_list, t, cek_fitur)  # recurse on the subset
def generate_sub_stump(feature_name, train_data, label, class_list, t, cek=None):
    """Build a one-level (decision-stump) node: map every value of
    `feature_name` to the majority class by weight.

    class_list must hold exactly two class values; t names the
    instance-weight column. Returns the {feature_value: class} dict.
    """
    if feature_name == None:
        feature_name = cek  # fall back to the previously-used feature
    # total weight per distinct feature value
    xcv = {}
    for tmp in train_data[feature_name].unique():
        xcv[tmp] = train_data[train_data[feature_name] == tmp].sum()[t]
    feature_value_count_dict = pd.Series(xcv)
    # NOTE: the original also computed f_left/l_node over the 8 feature
    # columns and re-ran the assignment once per class — both were dead code
    # (the decision only ever used cek_count) and have been removed.
    tree = {}
    # fix: .items() replaces Series.iteritems(), removed in pandas 2.0
    for feature_value, count in feature_value_count_dict.items():
        feature_value_data = train_data[train_data[feature_name] == feature_value]
        # weight of each class inside this partition, in class_list order
        cek_count = []
        cek_class = []
        for cek in class_list:
            class_count = feature_value_data[feature_value_data[label] == cek].sum()[t]
            cek_count.append(class_count)
            cek_class.append(cek)
        # the heavier class becomes the leaf for this feature value
        if cek_count[1] > cek_count[0]:
            tree[feature_value] = float(cek_class[1])
        else:
            tree[feature_value] = float(cek_class[0])
    return tree
def make_stump(root, prev_feature_value, train_data, label, class_list, t, cek_fitur=None):
    """Attach a single decision stump for the best feature onto `root`.

    prev_feature_value is unused (kept for signature parity with make_tree).
    """
    if train_data[t].sum() == 0:  # no weight left: nothing to fit
        return
    best_feature = find_most_informative_feature(train_data, label, class_list, t)
    root[best_feature] = generate_sub_stump(best_feature, train_data, label, class_list, t, cek_fitur)
def c45(train_data_m, label, t):
    """Train a full C4.5 tree.

    t: either an int round number (a fresh weight column is then created via
    weigher) or the name of an already-existing weight column.
    Returns the nested-dict tree.
    """
    train_data = train_data_m.copy()  # never mutate the caller's frame
    if isinstance(t, int):  # fix: isinstance instead of type(t) == int
        train_data, tn = weigher(train_data, t)
    else:
        tn = t
    tree = {}  # filled in place by the recursion
    class_list = train_data[label].unique()
    make_tree(tree, None, train_data, label, class_list, tn)
    return tree
def s_c45(train_data_m, label, t):
    """Train a single decision stump (one-level tree) on weight column t.

    Returns the {feature: {value: class}} dict.
    """
    train_data = train_data_m.copy()  # protect the caller's frame
    tree = {}
    class_list = train_data[label].unique()
    # fix: pass the copy — the original passed train_data_m here, which made
    # the .copy() above pointless
    make_stump(tree, None, train_data, label, class_list, t)
    return tree
def formatData(t, s):
    """Pretty-print a nested tree structure, one key per line, with each
    level indented by one extra tab (s = current depth)."""
    if isinstance(t, (dict, list)):
        for key in t:
            print("\t" * s + str(key))
            if not isinstance(t, list):  # lists print keys only, no recursion
                formatData(t[key], s + 1)
    else:
        print("\t" * s + str(t))  # leaf value
def predict(tree, instance):
    """Walk the nested-dict tree with the feature values of `instance`.

    Returns the leaf label reached, or None when the instance carries a
    feature value the tree never saw.
    """
    # iterative descent (equivalent to the recursive walk)
    while isinstance(tree, dict):
        feature = next(iter(tree))      # the node's splitting feature
        branches = tree[feature]
        value = instance[feature]
        if value not in branches:       # unseen feature value
            return None
        tree = branches[value]          # follow the branch
    return tree                         # leaf reached
def evaluate(tree, test_data_m):
    """Predict every row of test_data_m with `tree`; returns the list of
    predicted labels (None for rows the tree cannot classify)."""
    return [predict(tree, test_data_m.iloc[row]) for row in range(len(test_data_m))]
def performa(y_test, y_pred):
    """Print the confusion matrix, classification report and fold metrics.

    Class 1 (diabetic) is the positive class. Returns
    (accuracy, precision, recall) as percentages rounded to 2 decimals.
    """
    matrix = confusion_matrix(y_test, y_pred, labels=[1, 0])
    print('Confusion matrix : \n', matrix)
    print("")
    # fix: reuse the matrix already computed instead of calling
    # confusion_matrix a second time
    tp, fn, fp, tn = matrix.reshape(-1)
    print("True positive :", tp)
    print("False negative :", fn)
    print("False positive :", fp)
    print("True negative :", tn)
    print("")
    report = classification_report(y_test, y_pred, labels=[1, 0])
    print('Classification report : \n', report)
    print("")
    accuracy = round(accuracy_score(y_test, y_pred)*100, 2)
    presisi = round((tp/(tp+fp))*100, 2)
    recall = round((tp/(tp+fn))*100, 2)
    print("Akurasi fold ini adalah :", accuracy, " %")
    print("Presisi fold ini adalah :", presisi, " %")
    print("Recall fold ini adalah :", recall, " %")
    # NOTE: the original computed an unused accuracy2 here — dead code, removed
    print("===========================================================")
    return accuracy, presisi, recall
def t_to(t):
    """Map a boosting-round index to its weight-column name, e.g. 3 -> 'w3'."""
    return 'w' + str(t)
def weigher(train_data, t, missc=None, alpha=None):
    """Create the AdaBoost instance-weight column for round t.

    Round 0 initialises uniform weights 1/n. Later rounds copy the previous
    round's column, multiply the weights of the misclassified indices in
    `missc` by exp(alpha), renormalise to sum 1, and drop the old column.
    Returns (train_data, new_column_name).
    """
    ts = t_to(t)
    if t == 0:
        n_rows = train_data.shape[0]
        train_data[ts] = [1 / n_rows for _ in range(n_rows)]  # uniform start
        return train_data, ts
    tsb = t_to(t - 1)
    train_data[ts] = [w for w in train_data[tsb]]  # carry weights forward
    for idx in missc:
        # up-weight the examples the previous stump got wrong
        train_data.at[idx, ts] = train_data[ts][idx] * exp(alpha)
    column_total = train_data[ts].sum()
    train_data[ts] = [w / column_total for w in train_data[ts]]  # renormalise
    train_data = train_data.drop(columns=[tsb])  # old round no longer needed
    return train_data, ts
def stump(tree, train_data, t):
    """Score a fitted stump against the (weighted) training data.

    Adds an 'hp' prediction column to train_data (side effect relied on by
    the original pipeline). t names the weight column.
    Returns (missc, alpha): the misclassified row indices and the AdaBoost
    vote weight alpha = ln((1-erate)/erate).
    """
    train_data['hp'] = evaluate(tree, train_data)
    missc = []
    erate = 0  # weighted error rate
    for i in train_data.index:
        if train_data['Outcome'][i] != train_data['hp'][i]:
            missc.append(i)
            erate += train_data[t][i]
    # NOTE: the original also collected the correctly-classified indices in
    # an unused `bennc` list — dead code, removed.
    # fix: clamp erate away from 0 and 1 — a perfect (or fully wrong) stump
    # made log((1-erate)/erate) raise ZeroDivisionError / ValueError
    eps = 1e-10
    erate = min(max(erate, eps), 1 - eps)
    alpha = log((1 - erate) / erate)
    return missc, alpha
def adaboost(t, train_data):
    """Run t AdaBoost rounds of single-level C4.5 stumps.

    Returns a list of [stump_tree, alpha] pairs, one per round.
    """
    missc, alpha = 0, 0  # placeholders; ignored by weigher on round 0
    ensemble = []
    for round_idx in range(t):
        train_data, weight_col = weigher(train_data, round_idx, missc, alpha)
        learner = s_c45(train_data, 'Outcome', weight_col)
        missc, alpha = stump(learner, train_data, weight_col)
        print(weight_col, learner, alpha)  # progress trace (kept)
        ensemble.append([learner, alpha])
    return ensemble
def predict_adb(tree, test_data_m, sk):
    """Weighted-vote prediction using the first sk stumps of the ensemble.

    tree: list of [stump, alpha] pairs from adaboost(). On a tie (or when
    every stump abstains) the last stump's raw prediction stands — this
    mirrors the original behaviour.
    Returns the list of predicted labels.
    """
    pred = []
    all_votes = []  # per-row list of individual stump votes (diagnostic)
    for row in range(len(test_data_m)):
        instance = test_data_m.iloc[row]
        pos_weight = 0
        neg_weight = 0
        votes = []
        for k in range(sk):
            vote = predict(tree[k][0], instance)
            votes.append(vote)
            if vote == 0:
                neg_weight += tree[k][1]
            elif vote == 1:
                pos_weight += tree[k][1]
        if pos_weight > neg_weight:
            vote = 1
        elif pos_weight < neg_weight:
            vote = 0
        all_votes.append(votes)
        pred.append(vote)
    return pred
def k_fold(dataset, t, skn):
    # 10-fold cross-validation comparing plain C4.5 against AdaBoost.
    # t: number of boosting rounds; skn: list of ensemble sizes to evaluate
    # per fold. Returns (c45_metrics, adaboost_metrics, adaboost_ensembles,
    # c45_trees) where each metrics dict has 'akurasi'/'presisi'/'recall'.
    polo = len(dataset)
    k = 10
    fsize = round(polo/k)  # fold size; the first fold's train slice absorbs any remainder
    c45_final = {}
    c45_final_acc = []
    c45_final_pre = []
    c45_final_rec = []
    adab_final = {}
    adab_final_acc = []
    adab_final_pre = []
    adab_final_rec = []
    adatree = []    # one AdaBoost ensemble per fold
    treec45s = []   # one C4.5 tree per fold
    for o in range(k):
        ada_sk = []
        ada_pre = []
        ada_rec = []
        bak = round(polo/k)
        # aa doubles as a slice start (fold 0) and a multiplier (later folds):
        # for o >= 1, aa = o+1 so bak = fsize*(o+1) is the test slice's upper bound
        aa = 0 if o == 0 else o+1
        bak = bak if aa == 0 else bak*aa
        baw = bak-fsize  # lower bound of the test slice
        test_data = dataset[aa:bak] if aa == 0 else dataset[baw:bak]
        # training data is everything outside the test slice
        train_data = dataset[bak:] if aa == 0 else pd.concat([dataset[:baw], dataset[bak:]])
        y_test = test_data['Outcome']
        yyxx = [x for x in y_test]  # plain-list copy for printing
        print(">>> FOLD KE ", o+1)
        print("Panjang data test :", test_data.shape[0])
        print("Panjang data train :", train_data.shape[0])
        print("Data Test :\n", yyxx)
        print("*******************")
        print("Algoritma C4.5")
        print("*******************")
        # plain C4.5 baseline (t=0 -> a fresh uniform weight column is created)
        treec45 = c45(train_data.copy(), 'Outcome', 0)
        c45_y_pred = evaluate(treec45, test_data)
        print("Hasil Prediksi C45 :\n", c45_y_pred)
        accuracy, presisi, recall = performa(y_test, c45_y_pred)
        c45_final_acc.append(accuracy)
        c45_final_pre.append(presisi)
        c45_final_rec.append(recall)
        treec45s.append(treec45)
        # AdaBoost: train the full t-round ensemble once per fold...
        adahk = adaboost(t, train_data.copy())
        hsk = []
        for sk in skn:
            # ...then evaluate it truncated to the first sk stumps
            hkk = predict_adb(adahk, test_data, sk)
            hsk.append(hkk)
            print("*******************")
            print("Adaboost ", "t-", sk)
            print("*******************")
            print("Hasil Prediksi Adaboost :\n", hkk)
            ax1, ax2, ax3 = performa(y_test, hkk)
            ada_sk.append(ax1)
            ada_pre.append(ax2)
            ada_rec.append(ax3)
        adatree.append(adahk)
        adab_final_acc.append(ada_sk)
        adab_final_pre.append(ada_pre)
        adab_final_rec.append(ada_rec)
    c45_final['akurasi'] = c45_final_acc
    c45_final['presisi'] = c45_final_pre
    c45_final['recall'] = c45_final_rec
    adab_final['akurasi'] = adab_final_acc
    adab_final['presisi'] = adab_final_pre
    adab_final['recall'] = adab_final_rec
    return c45_final, adab_final, adatree, treec45s
# Experiment driver: 20 boosting rounds, evaluated at ensemble sizes
# 5/10/15/20 within each of the 10 folds.
sknar=[5,10,15,20]
t=20
# dwithknn: the KNN-imputed dataset prepared earlier in the file — TODO confirm
wk_acc_c45,wk_acc_adb,adatree,treec45s=k_fold(dwithknn,t,sknar)
>>> FOLD KE 1 Panjang data test : 77 Panjang data train : 691 Data Test : [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0] Confusion matrix : [[15 17] [11 34]] True positive : 15 False negative : 17 False positive : 11 True negative : 34 Classification report : precision recall f1-score support 1 0.58 0.47 0.52 32 0 0.67 0.76 0.71 45 accuracy 0.64 77 macro avg 0.62 0.61 0.61 77 weighted avg 0.63 0.64 0.63 77 Akurasi fold ini adalah : 63.64 % Presisi fold ini adalah : 57.69 % Recall fold ini adalah : 46.88 % =========================================================== w0 {'Glucose': {127.0: 0.0, 128.0: 1.0}} 1.10440940635244 w1 {'Insulin': {108.0: 0.0, 109.0: 0.0}} 0.5105149608566384 w2 {'Insulin': {108.0: 0.0, 109.0: 1.0}} 0.5827071946525647 w3 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.29741316767195075 w4 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.4616143510693342 w5 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.3903193823112505 w6 {'Age': {28.0: 0.0, 29.0: 1.0}} 0.4078434627714327 w7 {'DiabetesPedigreeFunction': {0.527: 0.0, 1.5270000000000001: 1.0}} 0.402934904789061 w8 {'Glucose': {127.0: 1.0, 128.0: 0.0}} 0.3265825744284367 w9 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.21222065930141987 w10 
{'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.17014242126889767 w11 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.22458853671758877 w12 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.12141524726088349 w13 {'Insulin': {108.0: 0.0, 109.0: 0.0}} 0.050689493098667965 w14 {'Insulin': {108.0: 0.0, 109.0: 1.0}} 0.07207070344663198 w15 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.06056865284921031 w16 {'DiabetesPedigreeFunction': {0.527: 1.0, 1.5270000000000001: 0.0}} 0.0489943889764297 w17 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.028790608426090183 w18 {'Glucose': {127.0: 1.0, 128.0: 0.0}} 0.03669136337045756 w19 {'Insulin': {108.0: 0.0, 109.0: 0.0}} 0.030320755499146708 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0] Confusion matrix : [[16 16] [11 34]] True positive : 16 False negative : 16 False positive : 11 True negative : 34 Classification report : precision recall f1-score support 1 0.59 0.50 0.54 32 0 0.68 0.76 0.72 45 accuracy 0.65 77 macro avg 0.64 0.63 0.63 77 weighted avg 0.64 0.65 0.64 77 Akurasi fold ini adalah : 64.94 % Presisi fold ini adalah : 59.26 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0] Confusion matrix : [[21 11] [12 33]] True positive : 21 False negative : 11 False positive : 12 True negative : 33 Classification report : precision recall f1-score support 1 0.64 0.66 0.65 32 0 0.75 0.73 0.74 45 accuracy 0.70 77 macro avg 0.69 0.69 0.69 77 weighted avg 0.70 0.70 
0.70 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 63.64 % Recall fold ini adalah : 65.62 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0] Confusion matrix : [[20 12] [11 34]] True positive : 20 False negative : 12 False positive : 11 True negative : 34 Classification report : precision recall f1-score support 1 0.65 0.62 0.63 32 0 0.74 0.76 0.75 45 accuracy 0.70 77 macro avg 0.69 0.69 0.69 77 weighted avg 0.70 0.70 0.70 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 64.52 % Recall fold ini adalah : 62.5 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0] Confusion matrix : [[20 12] [11 34]] True positive : 20 False negative : 12 False positive : 11 True negative : 34 Classification report : precision recall f1-score support 1 0.65 0.62 0.63 32 0 0.74 0.76 0.75 45 accuracy 0.70 77 macro avg 0.69 0.69 0.69 77 weighted avg 0.70 0.70 0.70 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 64.52 % Recall fold ini adalah : 62.5 % =========================================================== >>> FOLD KE 2 Panjang data test : 77 Panjang data train : 691 Data Test : [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 
0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0] Confusion matrix : [[15 7] [13 42]] True positive : 15 False negative : 7 False positive : 13 True negative : 42 Classification report : precision recall f1-score support 1 0.54 0.68 0.60 22 0 0.86 0.76 0.81 55 accuracy 0.74 77 macro avg 0.70 0.72 0.70 77 weighted avg 0.77 0.74 0.75 77 Akurasi fold ini adalah : 74.03 % Presisi fold ini adalah : 53.57 % Recall fold ini adalah : 68.18 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.656465613925347 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.8770774997129767 w2 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.752291949469069 w3 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5482750124497324 w4 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.43787527014685307 w5 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.32377557296169585 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.26238039737868124 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.35619679374851937 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.22093034541067996 w9 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.10849171802954947 w10 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.085174653650404 w11 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.04030802250558255 w12 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.05494529081822132 w13 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.0661801615781846 w14 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.03524132600835429 w15 
{'Age': {29.0: 0.0, 28.0: 1.0}} 0.03698085925431426 w16 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.032721275417328506 w17 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.018475417449789967 w18 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.009903989354940862 w19 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.014493390376846634 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1] Confusion matrix : [[17 5] [15 40]] True positive : 17 False negative : 5 False positive : 15 True negative : 40 Classification report : precision recall f1-score support 1 0.53 0.77 0.63 22 0 0.89 0.73 0.80 55 accuracy 0.74 77 macro avg 0.71 0.75 0.71 77 weighted avg 0.79 0.74 0.75 77 Akurasi fold ini adalah : 74.03 % Presisi fold ini adalah : 53.12 % Recall fold ini adalah : 77.27 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1] Confusion matrix : [[13 9] [ 8 47]] True positive : 13 False negative : 9 False positive : 8 True negative : 47 Classification report : precision recall f1-score support 1 0.62 0.59 0.60 22 0 0.84 0.85 0.85 55 accuracy 0.78 77 macro avg 0.73 0.72 0.73 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 61.9 % Recall fold ini adalah : 59.09 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 
0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1] Confusion matrix : [[13 9] [ 8 47]] True positive : 13 False negative : 9 False positive : 8 True negative : 47 Classification report : precision recall f1-score support 1 0.62 0.59 0.60 22 0 0.84 0.85 0.85 55 accuracy 0.78 77 macro avg 0.73 0.72 0.73 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 61.9 % Recall fold ini adalah : 59.09 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1] Confusion matrix : [[14 8] [ 9 46]] True positive : 14 False negative : 8 False positive : 9 True negative : 46 Classification report : precision recall f1-score support 1 0.61 0.64 0.62 22 0 0.85 0.84 0.84 55 accuracy 0.78 77 macro avg 0.73 0.74 0.73 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 60.87 % Recall fold ini adalah : 63.64 % =========================================================== >>> FOLD KE 3 Panjang data test : 77 Panjang data train : 691 Data Test : [1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 
0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] Confusion matrix : [[29 5] [ 9 34]] True positive : 29 False negative : 5 False positive : 9 True negative : 34 Classification report : precision recall f1-score support 1 0.76 0.85 0.81 34 0 0.87 0.79 0.83 43 accuracy 0.82 77 macro avg 0.82 0.82 0.82 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 76.32 % Recall fold ini adalah : 85.29 % =========================================================== w0 {'Insulin': {109.0: 0.0, 108.0: 0.0}} 0.6693622755365096 w1 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.9070401171457856 w2 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.38845229291061584 w3 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5190140002520912 w4 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.6644931834435341 w5 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.22513797598828117 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.3593521807914274 w7 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.40561019719745184 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.3301917125686123 w9 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.24027833530577264 w10 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.21433400070772435 w11 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.1444724689548544 w12 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.0453125646710913 w13 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.0735094596457429 w14 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.04662997658070943 w15 {'Pregnancies': {6.0: 0.0, 7.0: 0.0}} 0.029600092344392103 w16 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.044227829143850296 w17 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.022778329084824703 w18 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.021653205777851466 w19 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.03300982325983914 
******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[24 10] [ 8 35]] True positive : 24 False negative : 10 False positive : 8 True negative : 35 Classification report : precision recall f1-score support 1 0.75 0.71 0.73 34 0 0.78 0.81 0.80 43 accuracy 0.77 77 macro avg 0.76 0.76 0.76 77 weighted avg 0.77 0.77 0.77 77 Akurasi fold ini adalah : 76.62 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 70.59 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[26 8] [ 9 34]] True positive : 26 False negative : 8 False positive : 9 True negative : 34 Classification report : precision recall f1-score support 1 0.74 0.76 0.75 34 0 0.81 0.79 0.80 43 accuracy 0.78 77 macro avg 0.78 0.78 0.78 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 74.29 % Recall fold ini adalah : 76.47 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[27 7] [ 9 34]] True positive : 27 False negative : 7 False positive : 9 True negative : 34 Classification report : precision recall f1-score 
support 1 0.75 0.79 0.77 34 0 0.83 0.79 0.81 43 accuracy 0.79 77 macro avg 0.79 0.79 0.79 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 79.41 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[27 7] [ 9 34]] True positive : 27 False negative : 7 False positive : 9 True negative : 34 Classification report : precision recall f1-score support 1 0.75 0.79 0.77 34 0 0.83 0.79 0.81 43 accuracy 0.79 77 macro avg 0.79 0.79 0.79 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 79.41 % =========================================================== >>> FOLD KE 4 Panjang data test : 77 Panjang data train : 691 Data Test : [1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] Confusion matrix : [[19 11] [14 
33]] True positive : 19 False negative : 11 False positive : 14 True negative : 33 Classification report : precision recall f1-score support 1 0.58 0.63 0.60 30 0 0.75 0.70 0.73 47 accuracy 0.68 77 macro avg 0.66 0.67 0.66 77 weighted avg 0.68 0.68 0.68 77 Akurasi fold ini adalah : 67.53 % Presisi fold ini adalah : 57.58 % Recall fold ini adalah : 63.33 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.6693622755365096 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.9177443632614095 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5843608162804057 w3 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.8065845870110339 w4 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.46271971252002264 w5 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.3501036828870159 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.2710024271191263 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.2994537240308235 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.2155806392744649 w9 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.14988449170255355 w10 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.10945813699346084 w11 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.08553936511998017 w12 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.05655937407307682 w13 {'BloodPressure': {64.0: 0.0, 63.0: 0.0}} 0.035805415554311285 w14 {'BloodPressure': {64.0: 1.0, 63.0: 0.0}} 0.04411214664607982 w15 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.029695009057984542 w16 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.033959455747984674 w17 {'Age': {29.0: 0.0, 28.0: 1.0}} 0.05337244794106118 w18 {'BloodPressure': {64.0: 1.0, 63.0: 0.0}} 0.018261596716922898 w19 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.03884950184349952 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0] Confusion matrix : 
[[17 13] [13 34]] True positive : 17 False negative : 13 False positive : 13 True negative : 34 Classification report : precision recall f1-score support 1 0.57 0.57 0.57 30 0 0.72 0.72 0.72 47 accuracy 0.66 77 macro avg 0.65 0.65 0.65 77 weighted avg 0.66 0.66 0.66 77 Akurasi fold ini adalah : 66.23 % Presisi fold ini adalah : 56.67 % Recall fold ini adalah : 56.67 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[18 12] [11 36]] True positive : 18 False negative : 12 False positive : 11 True negative : 36 Classification report : precision recall f1-score support 1 0.62 0.60 0.61 30 0 0.75 0.77 0.76 47 accuracy 0.70 77 macro avg 0.69 0.68 0.68 77 weighted avg 0.70 0.70 0.70 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 62.07 % Recall fold ini adalah : 60.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[19 11] [14 33]] True positive : 19 False negative : 11 False positive : 14 True negative : 33 Classification report : precision recall f1-score support 1 0.58 0.63 0.60 30 0 0.75 0.70 0.73 47 accuracy 0.68 77 macro avg 0.66 0.67 0.66 77 weighted avg 0.68 0.68 0.68 77 Akurasi fold ini adalah : 67.53 % Presisi fold ini adalah : 57.58 % Recall fold ini adalah : 63.33 % =========================================================== ******************* Adaboost t- 20 
******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[18 12] [11 36]] True positive : 18 False negative : 12 False positive : 11 True negative : 36 Classification report : precision recall f1-score support 1 0.62 0.60 0.61 30 0 0.75 0.77 0.76 47 accuracy 0.70 77 macro avg 0.69 0.68 0.68 77 weighted avg 0.70 0.70 0.70 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 62.07 % Recall fold ini adalah : 60.0 % =========================================================== >>> FOLD KE 5 Panjang data test : 77 Panjang data train : 691 Data Test : [1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] Confusion matrix : [[17 10] [ 7 43]] True positive : 17 False negative : 10 False positive : 7 True negative : 43 Classification report : precision recall f1-score support 1 0.71 0.63 0.67 27 0 0.81 0.86 0.83 50 accuracy 0.78 77 macro avg 0.76 0.74 0.75 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 
70.83 % Recall fold ini adalah : 62.96 % =========================================================== w0 {'Insulin': {109.0: 0.0, 108.0: 0.0}} 0.6244506492737171 w1 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.8856189292844829 w2 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.3921217836461866 w3 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5693456405008482 w4 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.6492692790391739 w5 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.5064497287467298 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.22364286277661025 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.34424087200730547 w8 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.1025893505028567 w9 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.20310321899779077 w10 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.24724250767354644 w11 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.06574303933826153 w12 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.1005737729730464 w13 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.08570070992150383 w14 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.10007025347203398 w15 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.10300179463707708 w16 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.1008831739165547 w17 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.06516827679038725 w18 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.060553514307021605 w19 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.039346752732509066 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[15 12] [ 5 45]] True positive : 15 False negative : 12 False positive : 5 True negative : 45 Classification report : precision recall f1-score support 1 0.75 0.56 0.64 27 0 0.79 0.90 0.84 50 accuracy 0.78 77 macro avg 0.77 0.73 0.74 77 weighted avg 0.78 0.78 0.77 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 75.0 % Recall 
fold ini adalah : 55.56 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[18 9] [ 7 43]] True positive : 18 False negative : 9 False positive : 7 True negative : 43 Classification report : precision recall f1-score support 1 0.72 0.67 0.69 27 0 0.83 0.86 0.84 50 accuracy 0.79 77 macro avg 0.77 0.76 0.77 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 72.0 % Recall fold ini adalah : 66.67 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[16 11] [ 5 45]] True positive : 16 False negative : 11 False positive : 5 True negative : 45 Classification report : precision recall f1-score support 1 0.76 0.59 0.67 27 0 0.80 0.90 0.85 50 accuracy 0.79 77 macro avg 0.78 0.75 0.76 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 76.19 % Recall fold ini adalah : 59.26 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[18 9] [ 6 44]] True positive : 18 False negative : 9 False 
positive : 6 True negative : 44 Classification report : precision recall f1-score support 1 0.75 0.67 0.71 27 0 0.83 0.88 0.85 50 accuracy 0.81 77 macro avg 0.79 0.77 0.78 77 weighted avg 0.80 0.81 0.80 77 Akurasi fold ini adalah : 80.52 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 66.67 % =========================================================== >>> FOLD KE 6 Panjang data test : 77 Panjang data train : 691 Data Test : [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] Confusion matrix : [[23 7] [ 7 40]] True positive : 23 False negative : 7 False positive : 7 True negative : 40 Classification report : precision recall f1-score support 1 0.77 0.77 0.77 30 0 0.85 0.85 0.85 47 accuracy 0.82 77 macro avg 0.81 0.81 0.81 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 76.67 % Recall fold ini adalah : 76.67 % =========================================================== w0 {'Insulin': {109.0: 0.0, 108.0: 0.0}} 0.6436214518115658 w1 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.9059286833114396 w2 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.40107702532169026 w3 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.4938270047430686 w4 {'Glucose': {128.0: 1.0, 127.0: 
0.0}} 0.6364785793411107 w5 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.4785484130245446 w6 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.32598154922019473 w7 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.30252119812220085 w8 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.18685207137228102 w9 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.20861064809505306 w10 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.06728818572197127 w11 {'Pregnancies': {6.0: 0.0, 7.0: 0.0}} 0.056697121642894865 w12 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.06640968725853895 w13 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.09355066170646585 w14 {'Pregnancies': {6.0: 0.0, 7.0: 0.0}} 0.05272919994322677 w15 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.05225745996822638 w16 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.02726091327424862 w17 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.05348373898113194 w18 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.026558851120298748 w19 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.033970549304723624 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0] Confusion matrix : [[20 10] [ 7 40]] True positive : 20 False negative : 10 False positive : 7 True negative : 40 Classification report : precision recall f1-score support 1 0.74 0.67 0.70 30 0 0.80 0.85 0.82 47 accuracy 0.78 77 macro avg 0.77 0.76 0.76 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 74.07 % Recall fold ini adalah : 66.67 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 
0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0] Confusion matrix : [[21 9] [ 5 42]] True positive : 21 False negative : 9 False positive : 5 True negative : 42 Classification report : precision recall f1-score support 1 0.81 0.70 0.75 30 0 0.82 0.89 0.86 47 accuracy 0.82 77 macro avg 0.82 0.80 0.80 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 80.77 % Recall fold ini adalah : 70.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0] Confusion matrix : [[21 9] [ 5 42]] True positive : 21 False negative : 9 False positive : 5 True negative : 42 Classification report : precision recall f1-score support 1 0.81 0.70 0.75 30 0 0.82 0.89 0.86 47 accuracy 0.82 77 macro avg 0.82 0.80 0.80 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 80.77 % Recall fold ini adalah : 70.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0] Confusion matrix : [[22 8] [ 6 41]] True positive : 22 False negative : 8 False positive : 6 True negative : 41 Classification report : precision recall f1-score support 1 0.79 0.73 0.76 30 0 0.84 0.87 0.85 47 accuracy 0.82 77 macro avg 0.81 0.80 0.81 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 78.57 % Recall fold ini adalah : 73.33 % 
=========================================================== >>> FOLD KE 7 Panjang data test : 77 Panjang data train : 691 Data Test : [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] Confusion matrix : [[11 3] [14 49]] True positive : 11 False negative : 3 False positive : 14 True negative : 49 Classification report : precision recall f1-score support 1 0.44 0.79 0.56 14 0 0.94 0.78 0.85 63 accuracy 0.78 77 macro avg 0.69 0.78 0.71 77 weighted avg 0.85 0.78 0.80 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 44.0 % Recall fold ini adalah : 78.57 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.6888087789613535 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.8329314022659339 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5201611145238069 w3 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.7319623588581877 w4 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.21831868884815245 w5 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.3437521512876085 w6 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.3978778800754579 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.41825012276803114 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.2707586470384025 w9 
{'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.07221079157942908 w10 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.09593780719413367 w11 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.08281102508924519 w12 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.0495975946150453 w13 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.031242043390614455 w14 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.03636207163091169 w15 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.05465969245073296 w16 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.043454268804588345 w17 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.03253435571037753 w18 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.03304134886877499 w19 {'Age': {29.0: 0.0, 28.0: 1.0}} 0.027492320414903387 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] Confusion matrix : [[ 7 7] [ 9 54]] True positive : 7 False negative : 7 False positive : 9 True negative : 54 Classification report : precision recall f1-score support 1 0.44 0.50 0.47 14 0 0.89 0.86 0.87 63 accuracy 0.79 77 macro avg 0.66 0.68 0.67 77 weighted avg 0.80 0.79 0.80 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 43.75 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] Confusion matrix : [[11 3] [11 52]] True positive : 11 False negative : 3 False positive : 11 True negative : 52 Classification report : precision recall f1-score support 1 0.50 0.79 0.61 14 0 0.95 0.83 0.88 63 accuracy 0.82 77 
macro avg 0.72 0.81 0.75 77 weighted avg 0.86 0.82 0.83 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 50.0 % Recall fold ini adalah : 78.57 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] Confusion matrix : [[11 3] [11 52]] True positive : 11 False negative : 3 False positive : 11 True negative : 52 Classification report : precision recall f1-score support 1 0.50 0.79 0.61 14 0 0.95 0.83 0.88 63 accuracy 0.82 77 macro avg 0.72 0.81 0.75 77 weighted avg 0.86 0.82 0.83 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 50.0 % Recall fold ini adalah : 78.57 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] Confusion matrix : [[11 3] [11 52]] True positive : 11 False negative : 3 False positive : 11 True negative : 52 Classification report : precision recall f1-score support 1 0.50 0.79 0.61 14 0 0.95 0.83 0.88 63 accuracy 0.82 77 macro avg 0.72 0.81 0.75 77 weighted avg 0.86 0.82 0.83 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 50.0 % Recall fold ini adalah : 78.57 % =========================================================== >>> FOLD KE 8 Panjang data test : 77 Panjang data train : 691 Data Test : [1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0] Confusion matrix : [[22 3] [11 41]] True positive : 22 False negative : 3 False positive : 11 True negative : 41 Classification report : precision recall f1-score support 1 0.67 0.88 0.76 25 0 0.93 0.79 0.85 52 accuracy 0.82 77 macro avg 0.80 0.83 0.81 77 weighted avg 0.85 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 66.67 % Recall fold ini adalah : 88.0 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.6693622755365096 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.8759576212179595 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.505686602795264 w3 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.7010801827725435 w4 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.4175266845453082 w5 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.29363682293990273 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.28302621374581216 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.2930490862918666 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.2164413429329435 w9 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.1079923085953868 w10 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.0915749922129381 w11 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.05786855279972085 w12 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.05748682301366659 w13 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.06690714217723091 w14 {'BMI': {28.8: 
1.0, 27.8: 0.0}} 0.034367688221293866 w15 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.0433434162966958 w16 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.02608286857135777 w17 {'Age': {29.0: 0.0, 28.0: 1.0}} 0.02493713868162798 w18 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.02440038848666421 w19 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.02010147537943155 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[24 1] [11 41]] True positive : 24 False negative : 1 False positive : 11 True negative : 41 Classification report : precision recall f1-score support 1 0.69 0.96 0.80 25 0 0.98 0.79 0.87 52 accuracy 0.84 77 macro avg 0.83 0.87 0.84 77 weighted avg 0.88 0.84 0.85 77 Akurasi fold ini adalah : 84.42 % Presisi fold ini adalah : 68.57 % Recall fold ini adalah : 96.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[21 4] [ 7 45]] True positive : 21 False negative : 4 False positive : 7 True negative : 45 Classification report : precision recall f1-score support 1 0.75 0.84 0.79 25 0 0.92 0.87 0.89 52 accuracy 0.86 77 macro avg 0.83 0.85 0.84 77 weighted avg 0.86 0.86 0.86 77 Akurasi fold ini adalah : 85.71 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 84.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[21 4] [ 7 45]] True positive : 21 False negative : 4 False positive : 7 True negative : 45 Classification report : precision recall f1-score support 1 0.75 0.84 0.79 25 0 0.92 0.87 0.89 52 accuracy 0.86 77 macro avg 0.83 0.85 0.84 77 weighted avg 0.86 0.86 0.86 77 Akurasi fold ini adalah : 85.71 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 84.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[21 4] [ 7 45]] True positive : 21 False negative : 4 False positive : 7 True negative : 45 Classification report : precision recall f1-score support 1 0.75 0.84 0.79 25 0 0.92 0.87 0.89 52 accuracy 0.86 77 macro avg 0.83 0.85 0.84 77 weighted avg 0.86 0.86 0.86 77 Akurasi fold ini adalah : 85.71 % Presisi fold ini adalah : 75.0 % Recall fold ini adalah : 84.0 % =========================================================== >>> FOLD KE 9 Panjang data test : 77 Panjang data train : 691 Data Test : [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 
0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0] Confusion matrix : [[14 10] [13 40]] True positive : 14 False negative : 10 False positive : 13 True negative : 40 Classification report : precision recall f1-score support 1 0.52 0.58 0.55 24 0 0.80 0.75 0.78 53 accuracy 0.70 77 macro avg 0.66 0.67 0.66 77 weighted avg 0.71 0.70 0.71 77 Akurasi fold ini adalah : 70.13 % Presisi fold ini adalah : 51.85 % Recall fold ini adalah : 58.33 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.6823128383942417 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.8835561392906363 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5500824827202888 w3 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.7625985183825795 w4 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.42188890310518035 w5 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.3307855136714821 w6 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.2792913260911633 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.3668689375474824 w8 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.17932899057657858 w9 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.11469237122000214 w10 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.050063828550624107 w11 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.07164876317059546 w12 {'Insulin': {109.0: 0.0, 108.0: 1.0}} 0.1040988716615704 w13 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.06424878610093913 w14 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.03395835112016343 w15 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.021236701233633866 w16 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.017527072246665158 w17 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.011218774909028182 w18 {'BloodPressure': {64.0: 0.0, 63.0: 1.0}} 0.022364240282909672 w19 {'Pregnancies': {6.0: 
0.0, 7.0: 1.0}} 0.015818639002555184 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[17 7] [15 38]] True positive : 17 False negative : 7 False positive : 15 True negative : 38 Classification report : precision recall f1-score support 1 0.53 0.71 0.61 24 0 0.84 0.72 0.78 53 accuracy 0.71 77 macro avg 0.69 0.71 0.69 77 weighted avg 0.75 0.71 0.72 77 Akurasi fold ini adalah : 71.43 % Presisi fold ini adalah : 53.12 % Recall fold ini adalah : 70.83 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[14 10] [11 42]] True positive : 14 False negative : 10 False positive : 11 True negative : 42 Classification report : precision recall f1-score support 1 0.56 0.58 0.57 24 0 0.81 0.79 0.80 53 accuracy 0.73 77 macro avg 0.68 0.69 0.69 77 weighted avg 0.73 0.73 0.73 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 56.0 % Recall fold ini adalah : 58.33 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[13 11] [10 43]] True positive : 13 False negative : 11 False positive : 10 True negative : 43 
Classification report : precision recall f1-score support 1 0.57 0.54 0.55 24 0 0.80 0.81 0.80 53 accuracy 0.73 77 macro avg 0.68 0.68 0.68 77 weighted avg 0.72 0.73 0.73 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 56.52 % Recall fold ini adalah : 54.17 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[13 11] [10 43]] True positive : 13 False negative : 11 False positive : 10 True negative : 43 Classification report : precision recall f1-score support 1 0.57 0.54 0.55 24 0 0.80 0.81 0.80 53 accuracy 0.73 77 macro avg 0.68 0.68 0.68 77 weighted avg 0.72 0.73 0.73 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 56.52 % Recall fold ini adalah : 54.17 % =========================================================== >>> FOLD KE 10 Panjang data test : 75 Panjang data train : 693 Data Test : [1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 
0.0] Confusion matrix : [[22 8] [ 6 39]] True positive : 22 False negative : 8 False positive : 6 True negative : 39 Classification report : precision recall f1-score support 1 0.79 0.73 0.76 30 0 0.83 0.87 0.85 45 accuracy 0.81 75 macro avg 0.81 0.80 0.80 75 weighted avg 0.81 0.81 0.81 75 Akurasi fold ini adalah : 81.33 % Presisi fold ini adalah : 78.57 % Recall fold ini adalah : 73.33 % =========================================================== w0 {'Insulin': {109.0: 1.0, 108.0: 0.0}} 0.660851585868589 w1 {'Glucose': {128.0: 0.0, 127.0: 0.0}} 0.9098417100901979 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5671360924694835 w3 {'Glucose': {128.0: 1.0, 127.0: 0.0}} 0.6995240111822959 w4 {'Pregnancies': {6.0: 0.0, 7.0: 1.0}} 0.49968860121553443 w5 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.2834307764954357 w6 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.3370280832596378 w7 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.21038953803860658 w8 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.11839518543185859 w9 {'SkinThickness': {24.0: 1.0, 23.0: 0.0}} 0.09815814809742371 w10 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.06740271753153945 w11 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.09653853411314195 w12 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.06509853386142073 w13 {'Glucose': {128.0: 0.0, 127.0: 1.0}} 0.07740453056313128 w14 {'Pregnancies': {6.0: 0.0, 7.0: 0.0}} 0.04434334187970054 w15 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.047024942047195886 w16 {'Age': {29.0: 1.0, 28.0: 0.0}} 0.0612240284178165 w17 {'Pregnancies': {6.0: 0.0, 7.0: 0.0}} 0.036233317715199304 w18 {'Pregnancies': {6.0: 1.0, 7.0: 0.0}} 0.03626821358670146 w19 {'SkinThickness': {24.0: 0.0, 23.0: 0.0}} 0.019201534471161238 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0] 
Confusion matrix : [[22 8] [ 6 39]] True positive : 22 False negative : 8 False positive : 6 True negative : 39 Classification report : precision recall f1-score support 1 0.79 0.73 0.76 30 0 0.83 0.87 0.85 45 accuracy 0.81 75 macro avg 0.81 0.80 0.80 75 weighted avg 0.81 0.81 0.81 75 Akurasi fold ini adalah : 81.33 % Presisi fold ini adalah : 78.57 % Recall fold ini adalah : 73.33 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 5 40]] True positive : 19 False negative : 11 False positive : 5 True negative : 40 Classification report : precision recall f1-score support 1 0.79 0.63 0.70 30 0 0.78 0.89 0.83 45 accuracy 0.79 75 macro avg 0.79 0.76 0.77 75 weighted avg 0.79 0.79 0.78 75 Akurasi fold ini adalah : 78.67 % Presisi fold ini adalah : 79.17 % Recall fold ini adalah : 63.33 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 5 40]] True positive : 19 False negative : 11 False positive : 5 True negative : 40 Classification report : precision recall f1-score support 1 0.79 0.63 0.70 30 0 0.78 0.89 0.83 45 accuracy 0.79 75 macro avg 0.79 0.76 0.77 75 weighted avg 0.79 0.79 0.78 75 Akurasi fold ini adalah : 78.67 % Presisi fold ini adalah : 79.17 % Recall fold ini adalah : 63.33 % =========================================================== ******************* Adaboost t- 20 
******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 5 40]] True positive : 19 False negative : 11 False positive : 5 True negative : 40 Classification report : precision recall f1-score support 1 0.79 0.63 0.70 30 0 0.78 0.89 0.83 45 accuracy 0.79 75 macro avg 0.79 0.76 0.77 75 weighted avg 0.79 0.79 0.78 75 Akurasi fold ini adalah : 78.67 % Presisi fold ini adalah : 79.17 % Recall fold ini adalah : 63.33 % ===========================================================
# Run the cross-validation experiment on `dnoknn` (presumably the dataset WITHOUT
# KNN imputation, contrasting the earlier KNN-imputed run whose output precedes
# this line — TODO confirm against where `dnoknn` is built).
# NOTE(review): `k_fold` is defined elsewhere in this file; from the printed output
# below ("Panjang data test : 77", "Panjang data train : 691", folds 1..10 over a
# 768-row dataset) the fold count looks like 10, so the second argument `20` is
# probably NOT k — it may be the max AdaBoost rounds (output reports t = 5/10/15/20).
# Verify against the k_fold signature. `sknar` is an opaque argument defined
# elsewhere — can't tell its meaning from here.
# Returns (per the unpacking): C4.5 accuracies, AdaBoost accuracies, the AdaBoost
# ensemble, and the C4.5 trees — names suggest this; confirm with k_fold's return.
nk_acc_c45,nk_acc_adb,adatree,treec45s=k_fold(dnoknn,20,sknar)
>>> FOLD KE 1 Panjang data test : 77 Panjang data train : 691 Data Test : [1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1, 0.0, 1, 0.0, 0.0, 1.0, 0.0, 0.0, 1, 0, 0, 0.0, 1.0, 1.0, 0, 1, 0.0, 1.0, 0.0, 0, 1.0, 0.0, 1, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 1, 0.0, 0.0, 0.0, 0, 0.0] Confusion matrix : [[13 19] [ 9 36]] True positive : 13 False negative : 19 False positive : 9 True negative : 36 Classification report : precision recall f1-score support 1 0.59 0.41 0.48 32 0 0.65 0.80 0.72 45 accuracy 0.64 77 macro avg 0.62 0.60 0.60 77 weighted avg 0.63 0.64 0.62 77 Akurasi fold ini adalah : 63.64 % Presisi fold ini adalah : 59.09 % Recall fold ini adalah : 40.62 % =========================================================== w0 {'Glucose': {127: 0.0, 128: 1.0}} 1.0889875752939042 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.5025458422691002 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5190441987327198 w3 {'Age': {28: 0.0, 29: 1.0}} 0.5417481684658161 w4 {'DiabetesPedigreeFunction': {0.527: 0.0, 1.5270000000000001: 1.0}} 0.426206634910509 w5 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.3320514087499889 w6 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.11892145284649915 w7 {'Glucose': {127: 0.0, 128: 0.0}} 0.15301554206415063 w8 {'Glucose': {127: 1.0, 128: 0.0}} 0.1487266100095366 w9 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.08488968997826894 w10 {'DiabetesPedigreeFunction': {0.527: 0.0, 1.5270000000000001: 0.0}} 0.06635841408909218 w11 {'DiabetesPedigreeFunction': {0.527: 1.0, 1.5270000000000001: 0.0}} 0.08323090265374652 w12 {'BloodPressure': {69: 0.0, 68: 1.0}} 
0.08948616027058726 w13 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.04931581023050052 w14 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.029028726606153136 w15 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.025737113094545957 w16 {'Insulin': {120: 0.0, 121: 1.0}} 0.03229422147239687 w17 {'DiabetesPedigreeFunction': {0.527: 1.0, 1.5270000000000001: 0.0}} 0.016588973158100033 w18 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.009522557731094212 w19 {'Age': {28: 0.0, 29: 1.0}} 0.013991974547398135 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0] Confusion matrix : [[16 16] [12 33]] True positive : 16 False negative : 16 False positive : 12 True negative : 33 Classification report : precision recall f1-score support 1 0.57 0.50 0.53 32 0 0.67 0.73 0.70 45 accuracy 0.64 77 macro avg 0.62 0.62 0.62 77 weighted avg 0.63 0.64 0.63 77 Akurasi fold ini adalah : 63.64 % Presisi fold ini adalah : 57.14 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0] Confusion matrix : [[18 14] [ 8 37]] True positive : 18 False negative : 14 False positive : 8 True negative : 37 Classification report : precision recall f1-score support 1 0.69 0.56 0.62 32 0 0.73 0.82 0.77 45 accuracy 0.71 77 macro avg 0.71 0.69 0.70 77 weighted avg 0.71 0.71 0.71 77 Akurasi fold ini adalah : 71.43 % Presisi fold ini adalah : 69.23 % Recall fold ini adalah : 56.25 % =========================================================== ******************* 
Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0] Confusion matrix : [[17 15] [ 7 38]] True positive : 17 False negative : 15 False positive : 7 True negative : 38 Classification report : precision recall f1-score support 1 0.71 0.53 0.61 32 0 0.72 0.84 0.78 45 accuracy 0.71 77 macro avg 0.71 0.69 0.69 77 weighted avg 0.71 0.71 0.71 77 Akurasi fold ini adalah : 71.43 % Presisi fold ini adalah : 70.83 % Recall fold ini adalah : 53.12 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0] Confusion matrix : [[17 15] [ 7 38]] True positive : 17 False negative : 15 False positive : 7 True negative : 38 Classification report : precision recall f1-score support 1 0.71 0.53 0.61 32 0 0.72 0.84 0.78 45 accuracy 0.71 77 macro avg 0.71 0.69 0.69 77 weighted avg 0.71 0.71 0.71 77 Akurasi fold ini adalah : 71.43 % Presisi fold ini adalah : 70.83 % Recall fold ini adalah : 53.12 % =========================================================== >>> FOLD KE 2 Panjang data test : 77 Panjang data train : 691 Data Test : [0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 0.0, 0, 0, 0, 1.0, 0, 1.0, 0.0, 0.0, 0.0, 1.0, 0, 0, 1, 0.0, 0.0, 0, 1.0, 0.0, 0, 0.0, 0, 1.0, 1.0, 0, 0, 
0.0, 0.0, 0, 1.0, 0.0, 0.0, 1.0, 1, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 1, 0.0, 1.0, 0, 1, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0, 1.0, 0.0, 1.0, 1.0] Confusion matrix : [[12 10] [12 43]] True positive : 12 False negative : 10 False positive : 12 True negative : 43 Classification report : precision recall f1-score support 1 0.50 0.55 0.52 22 0 0.81 0.78 0.80 55 accuracy 0.71 77 macro avg 0.66 0.66 0.66 77 weighted avg 0.72 0.71 0.72 77 Akurasi fold ini adalah : 71.43 % Presisi fold ini adalah : 50.0 % Recall fold ini adalah : 54.55 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.0209952947370655 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.4443497343897111 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5317805593781872 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5279128139420783 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.4146230150594495 w5 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.3671718197505588 w6 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.13217671357044677 w7 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.17142766848161495 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.14518655468172675 w9 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.12478187451192112 w10 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.08046556132813219 w11 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.0742041557322578 w12 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.06431296384845683 w13 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.0870846447780481 w14 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.027170114652399855 w15 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.03170591108979495 w16 {'Insulin': {120: 0.0, 121: 1.0}} 0.03802724951506011 w17 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.031341729182533494 w18 {'Age': {29: 1.0, 28: 0.0}} 0.02135179914348787 w19 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.011823277593309348 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 1, 
0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1] Confusion matrix : [[15 7] [11 44]] True positive : 15 False negative : 7 False positive : 11 True negative : 44 Classification report : precision recall f1-score support 1 0.58 0.68 0.62 22 0 0.86 0.80 0.83 55 accuracy 0.77 77 macro avg 0.72 0.74 0.73 77 weighted avg 0.78 0.77 0.77 77 Akurasi fold ini adalah : 76.62 % Presisi fold ini adalah : 57.69 % Recall fold ini adalah : 68.18 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1] Confusion matrix : [[11 11] [ 6 49]] True positive : 11 False negative : 11 False positive : 6 True negative : 49 Classification report : precision recall f1-score support 1 0.65 0.50 0.56 22 0 0.82 0.89 0.85 55 accuracy 0.78 77 macro avg 0.73 0.70 0.71 77 weighted avg 0.77 0.78 0.77 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 64.71 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1] Confusion matrix : [[11 11] [ 6 49]] True positive : 11 False negative : 11 False positive : 6 True negative : 49 Classification report : precision recall f1-score support 1 0.65 0.50 0.56 22 0 0.82 0.89 0.85 55 accuracy 0.78 77 macro avg 0.73 0.70 
0.71 77 weighted avg 0.77 0.78 0.77 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 64.71 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1] Confusion matrix : [[11 11] [ 6 49]] True positive : 11 False negative : 11 False positive : 6 True negative : 49 Classification report : precision recall f1-score support 1 0.65 0.50 0.56 22 0 0.82 0.89 0.85 55 accuracy 0.78 77 macro avg 0.73 0.70 0.71 77 weighted avg 0.77 0.78 0.77 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 64.71 % Recall fold ini adalah : 50.0 % =========================================================== >>> FOLD KE 3 Panjang data test : 77 Panjang data train : 691 Data Test : [1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 1.0, 0, 0, 0.0, 1.0, 1.0, 0.0, 0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0, 0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0, 0.0, 1, 1.0, 0.0, 1.0, 0, 0, 1.0, 1.0, 0.0, 0.0, 0.0, 0, 1.0, 0, 1, 1.0, 0.0, 1.0, 0, 1.0, 1.0, 0.0, 0.0, 1.0, 0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] Confusion matrix : [[26 8] [12 31]] True positive : 26 False negative : 8 False positive : 12 True negative : 31 Classification report : precision recall f1-score support 1 0.68 0.76 0.72 34 0 0.79 0.72 0.76 43 accuracy 0.74 77 macro avg 0.74 0.74 0.74 77 weighted avg 0.75 0.74 0.74 77 Akurasi fold 
ini adalah : 74.03 % Presisi fold ini adalah : 68.42 % Recall fold ini adalah : 76.47 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.0061808442091453 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.5024591601430485 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5458944349971626 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5695704704928805 w4 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.4214000652249165 w5 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.2130276371438679 w6 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.20335518906239436 w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.26838600795710177 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.13765369097981725 w9 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.061953519844992005 w10 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.11929047856460782 w11 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.08374453782811403 w12 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.026344139933802586 w13 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.031974556840658475 w14 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.030207621085582063 w15 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.014794620728060641 w16 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.01677426136374761 w17 {'SkinThickness': {32: 0.0, 31: 1.0}} 0.02045416069622986 w18 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.017639517244763005 w19 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.01976314962763356 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[27 7] [11 32]] True positive : 27 False negative : 7 False positive : 11 True negative : 32 Classification report : precision recall f1-score support 1 0.71 0.79 0.75 34 0 0.82 0.74 0.78 43 accuracy 0.77 77 macro avg 0.77 0.77 0.77 77 weighted avg 0.77 0.77 0.77 77 Akurasi 
fold ini adalah : 76.62 % Presisi fold ini adalah : 71.05 % Recall fold ini adalah : 79.41 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[23 11] [ 5 38]] True positive : 23 False negative : 11 False positive : 5 True negative : 38 Classification report : precision recall f1-score support 1 0.82 0.68 0.74 34 0 0.78 0.88 0.83 43 accuracy 0.79 77 macro avg 0.80 0.78 0.78 77 weighted avg 0.80 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 82.14 % Recall fold ini adalah : 67.65 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion matrix : [[23 11] [ 5 38]] True positive : 23 False negative : 11 False positive : 5 True negative : 38 Classification report : precision recall f1-score support 1 0.82 0.68 0.74 34 0 0.78 0.88 0.83 43 accuracy 0.79 77 macro avg 0.80 0.78 0.78 77 weighted avg 0.80 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 82.14 % Recall fold ini adalah : 67.65 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1] Confusion 
matrix : [[23 11] [ 5 38]] True positive : 23 False negative : 11 False positive : 5 True negative : 38 Classification report : precision recall f1-score support 1 0.82 0.68 0.74 34 0 0.78 0.88 0.83 43 accuracy 0.79 77 macro avg 0.80 0.78 0.78 77 weighted avg 0.80 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 82.14 % Recall fold ini adalah : 67.65 % =========================================================== >>> FOLD KE 4 Panjang data test : 77 Panjang data train : 691 Data Test : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 0.0, 0.0, 0.0, 0.0, 1, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0, 0.0, 0, 0.0, 0.0, 0.0, 0, 1, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1, 0, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0.0, 1, 1, 1.0, 0.0, 1.0, 1.0, 1.0, 0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0, 0] Confusion matrix : [[15 15] [14 33]] True positive : 15 False negative : 15 False positive : 14 True negative : 33 Classification report : precision recall f1-score support 1 0.52 0.50 0.51 30 0 0.69 0.70 0.69 47 accuracy 0.62 77 macro avg 0.60 0.60 0.60 77 weighted avg 0.62 0.62 0.62 77 Akurasi fold ini adalah : 62.34 % Presisi fold ini adalah : 51.72 % Recall fold ini adalah : 50.0 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.0660735328813402 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.4673591524883059 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5986289856457404 w3 {'Age': {29: 1.0, 28: 0.0}} 0.560385160090479 w4 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.46902810782250776 w5 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.2079901522028697 w6 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.2945982213619648 
w7 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.25109746832551766 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.18427836366849215 w9 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.11411890226866249 w10 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.1301425204715043 w11 {'Insulin': {120: 0.0, 121: 1.0}} 0.029896108432542205 w12 {'Age': {29: 1.0, 28: 0.0}} 0.039943769899428445 w13 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.02696598575730403 w14 {'Insulin': {120: 1.0, 121: 1.0}} 0.028974185664093842 w15 {'Insulin': {120: 0.0, 121: 1.0}} 0.014672964631899488 w16 {'BMI': {28.8: 0.0, 27.8: 1.0}} 0.00919335448427747 w17 {'Glucose': {128: 0.0, 127: 1.0}} 0.014862142141176477 w18 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 1.0}} 0.010488238430331783 w19 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.01185926611851229 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0] Confusion matrix : [[17 13] [13 34]] True positive : 17 False negative : 13 False positive : 13 True negative : 34 Classification report : precision recall f1-score support 1 0.57 0.57 0.57 30 0 0.72 0.72 0.72 47 accuracy 0.66 77 macro avg 0.65 0.65 0.65 77 weighted avg 0.66 0.66 0.66 77 Akurasi fold ini adalah : 66.23 % Presisi fold ini adalah : 56.67 % Recall fold ini adalah : 56.67 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0] Confusion matrix : [[14 16] [ 9 38]] True positive : 14 False 
negative : 16 False positive : 9 True negative : 38 Classification report : precision recall f1-score support 1 0.61 0.47 0.53 30 0 0.70 0.81 0.75 47 accuracy 0.68 77 macro avg 0.66 0.64 0.64 77 weighted avg 0.67 0.68 0.67 77 Akurasi fold ini adalah : 67.53 % Presisi fold ini adalah : 60.87 % Recall fold ini adalah : 46.67 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0] Confusion matrix : [[14 16] [ 9 38]] True positive : 14 False negative : 16 False positive : 9 True negative : 38 Classification report : precision recall f1-score support 1 0.61 0.47 0.53 30 0 0.70 0.81 0.75 47 accuracy 0.68 77 macro avg 0.66 0.64 0.64 77 weighted avg 0.67 0.68 0.67 77 Akurasi fold ini adalah : 67.53 % Presisi fold ini adalah : 60.87 % Recall fold ini adalah : 46.67 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0] Confusion matrix : [[14 16] [ 9 38]] True positive : 14 False negative : 16 False positive : 9 True negative : 38 Classification report : precision recall f1-score support 1 0.61 0.47 0.53 30 0 0.70 0.81 0.75 47 accuracy 0.68 77 macro avg 0.66 0.64 0.64 77 weighted avg 0.67 0.68 0.67 77 Akurasi fold ini adalah : 67.53 % Presisi fold ini adalah : 60.87 % Recall fold ini adalah : 46.67 % =========================================================== >>> FOLD KE 5 Panjang data test : 77 Panjang data train : 691 Data Test : [1, 1, 0, 0, 
1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1.0, 1.0, 0, 0, 0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0, 0.0, 0.0, 0, 0.0, 0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0, 1.0, 0.0, 0.0, 1.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0, 0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0, 0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0, 0.0, 1.0, 0.0, 0.0, 0, 0.0, 0.0, 0] Confusion matrix : [[16 11] [ 7 43]] True positive : 16 False negative : 11 False positive : 7 True negative : 43 Classification report : precision recall f1-score support 1 0.70 0.59 0.64 27 0 0.80 0.86 0.83 50 accuracy 0.77 77 macro avg 0.75 0.73 0.73 77 weighted avg 0.76 0.77 0.76 77 Akurasi fold ini adalah : 76.62 % Presisi fold ini adalah : 69.57 % Recall fold ini adalah : 59.26 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.013575245982202 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.47643958671141695 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5609508265675935 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5379406226090246 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.42895937150697955 w5 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.2909040344487451 w6 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.21635093576834402 w7 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.3044435349211881 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.1542386560887124 w9 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.1259728325696127 w10 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.1135573286672109 w11 {'SkinThickness': {32: 1.0, 31: 1.0}} 0.05529579754715737 w12 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.06416438693030486 w13 {'Insulin': {120: 1.0, 121: 1.0}} 0.023558246853703415 w14 {'Insulin': {120: 0.0, 121: 1.0}} 
0.029846572869663718 w15 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.02014109367121675 w16 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.033462294652272476 w17 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.02208120763264851 w18 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.017142479498485876 w19 {'Pregnancies': {6: 1.0, 7: 1.0}} 0.010949551963433196 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[16 11] [ 7 43]] True positive : 16 False negative : 11 False positive : 7 True negative : 43 Classification report : precision recall f1-score support 1 0.70 0.59 0.64 27 0 0.80 0.86 0.83 50 accuracy 0.77 77 macro avg 0.75 0.73 0.73 77 weighted avg 0.76 0.77 0.76 77 Akurasi fold ini adalah : 76.62 % Presisi fold ini adalah : 69.57 % Recall fold ini adalah : 59.26 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[16 11] [ 5 45]] True positive : 16 False negative : 11 False positive : 5 True negative : 45 Classification report : precision recall f1-score support 1 0.76 0.59 0.67 27 0 0.80 0.90 0.85 50 accuracy 0.79 77 macro avg 0.78 0.75 0.76 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 76.19 % Recall fold ini adalah : 59.26 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 
1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[16 11] [ 5 45]] True positive : 16 False negative : 11 False positive : 5 True negative : 45 Classification report : precision recall f1-score support 1 0.76 0.59 0.67 27 0 0.80 0.90 0.85 50 accuracy 0.79 77 macro avg 0.78 0.75 0.76 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 76.19 % Recall fold ini adalah : 59.26 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[16 11] [ 5 45]] True positive : 16 False negative : 11 False positive : 5 True negative : 45 Classification report : precision recall f1-score support 1 0.76 0.59 0.67 27 0 0.80 0.90 0.85 50 accuracy 0.79 77 macro avg 0.78 0.75 0.76 77 weighted avg 0.79 0.79 0.79 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 76.19 % Recall fold ini adalah : 59.26 % =========================================================== >>> FOLD KE 6 Panjang data test : 77 Panjang data train : 691 Data Test : [0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0, 0.0, 1.0, 0.0, 0, 1.0, 0, 0.0, 0.0, 1.0, 0, 0.0, 1.0, 0.0, 0.0, 0, 0, 1.0, 0, 0, 1.0, 0.0, 1.0, 1.0, 0.0, 1, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 1.0, 1.0, 0, 1.0, 1.0, 0.0, 0, 1.0, 0.0, 0.0, 
0.0, 1.0, 1.0, 0.0, 0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0, 0.0, 0.0, 0.0, 0, 1.0, 0.0, 0.0, 0.0, 1, 1, 0.0, 1.0, 1, 0.0, 0] Confusion matrix : [[20 10] [ 9 38]] True positive : 20 False negative : 10 False positive : 9 True negative : 38 Classification report : precision recall f1-score support 1 0.69 0.67 0.68 30 0 0.79 0.81 0.80 47 accuracy 0.75 77 macro avg 0.74 0.74 0.74 77 weighted avg 0.75 0.75 0.75 77 Akurasi fold ini adalah : 75.32 % Presisi fold ini adalah : 68.97 % Recall fold ini adalah : 66.67 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.0061808442091453 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.4783997974499721 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.49263919847481247 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5185618264045502 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.4269708013271865 w5 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.3799576850840064 w6 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.1026153271886141 w7 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.1589440290287949 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.17412299009443705 w9 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.12792405781993893 w10 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.06901787357181162 w11 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.08226841988681756 w12 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.06452515847587553 w13 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.04086844030198589 w14 {'Insulin': {120: 0.0, 121: 1.0}} 0.04929115243699896 w15 {'SkinThickness': {32: 0.0, 31: 1.0}} 0.03597217091512214 w16 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.020791996692471004 w17 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.029286287382713893 w18 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.014007415207947554 w19 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.018524349452760144 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 
0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0] Confusion matrix : [[21 9] [ 8 39]] True positive : 21 False negative : 9 False positive : 8 True negative : 39 Classification report : precision recall f1-score support 1 0.72 0.70 0.71 30 0 0.81 0.83 0.82 47 accuracy 0.78 77 macro avg 0.77 0.76 0.77 77 weighted avg 0.78 0.78 0.78 77 Akurasi fold ini adalah : 77.92 % Presisi fold ini adalah : 72.41 % Recall fold ini adalah : 70.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0] Confusion matrix : [[18 12] [ 4 43]] True positive : 18 False negative : 12 False positive : 4 True negative : 43 Classification report : precision recall f1-score support 1 0.82 0.60 0.69 30 0 0.78 0.91 0.84 47 accuracy 0.79 77 macro avg 0.80 0.76 0.77 77 weighted avg 0.80 0.79 0.78 77 Akurasi fold ini adalah : 79.22 % Presisi fold ini adalah : 81.82 % Recall fold ini adalah : 60.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0] Confusion matrix : [[18 12] [ 3 44]] True positive : 18 False negative : 12 False positive : 3 True negative : 44 Classification report : precision recall f1-score support 1 0.86 0.60 0.71 30 0 0.79 0.94 0.85 47 accuracy 0.81 77 macro avg 0.82 0.77 0.78 77 weighted avg 0.81 0.81 0.80 77 Akurasi fold ini adalah : 80.52 % Presisi fold ini adalah : 
85.71 % Recall fold ini adalah : 60.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0] Confusion matrix : [[18 12] [ 2 45]] True positive : 18 False negative : 12 False positive : 2 True negative : 45 Classification report : precision recall f1-score support 1 0.90 0.60 0.72 30 0 0.79 0.96 0.87 47 accuracy 0.82 77 macro avg 0.84 0.78 0.79 77 weighted avg 0.83 0.82 0.81 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 90.0 % Recall fold ini adalah : 60.0 % =========================================================== >>> FOLD KE 7 Panjang data test : 77 Panjang data train : 691 Data Test : [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1, 0.0, 0.0, 0, 0, 1.0, 1.0, 1.0, 1, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0, 1.0, 1.0, 0.0, 0, 0, 1.0, 1.0, 1, 1.0, 0, 1.0, 0.0, 0.0, 1.0, 1.0, 0, 0.0, 0.0, 0.0, 0, 1.0, 0, 0.0, 0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0, 0.0, 0, 0, 0.0, 1.0, 0.0, 1.0, 1, 0, 0, 0, 1.0, 0.0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0] Confusion matrix : [[ 9 5] [16 47]] True positive : 9 False negative : 5 False positive : 16 True negative : 47 Classification report : precision recall f1-score support 1 0.36 0.64 0.46 14 0 0.90 0.75 0.82 63 accuracy 0.73 77 macro avg 0.63 0.69 0.64 77 weighted avg 0.80 0.73 0.75 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 36.0 % Recall fold ini adalah : 64.29 % 
=========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.013575245982202 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.3864602230214747 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5248551091408121 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5357816358981357 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.4502685505967478 w5 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.34449174353563367 w6 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.11141775781781224 w7 {'Glucose': {128: 0.0, 127: 0.0}} 0.16130843812645906 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.1604827588631298 w9 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.08912788485780045 w10 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.1284319647235065 w11 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.07986046253648521 w12 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.08535240941321488 w13 {'Pregnancies': {6: 0.0, 7: 0.0}} 0.02415129371392441 w14 {'Pregnancies': {6: 1.0, 7: 0.0}} 0.03026539685326428 w15 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.01514770443041451 w16 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.023205914529791524 w17 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.013628895491653758 w18 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.016848329981988503 w19 {'Age': {29: 1.0, 28: 0.0}} 0.012223853986120949 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0] Confusion matrix : [[12 2] [16 47]] True positive : 12 False negative : 2 False positive : 16 True negative : 47 Classification report : precision recall f1-score support 1 0.43 0.86 0.57 14 0 0.96 0.75 0.84 63 accuracy 0.77 77 macro avg 0.69 0.80 0.71 77 weighted avg 0.86 0.77 0.79 77 Akurasi fold ini adalah : 76.62 % Presisi fold ini adalah : 42.86 % Recall 
fold ini adalah : 85.71 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] Confusion matrix : [[ 6 8] [12 51]] True positive : 6 False negative : 8 False positive : 12 True negative : 51 Classification report : precision recall f1-score support 1 0.33 0.43 0.38 14 0 0.86 0.81 0.84 63 accuracy 0.74 77 macro avg 0.60 0.62 0.61 77 weighted avg 0.77 0.74 0.75 77 Akurasi fold ini adalah : 74.03 % Presisi fold ini adalah : 33.33 % Recall fold ini adalah : 42.86 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[ 7 7] [12 51]] True positive : 7 False negative : 7 False positive : 12 True negative : 51 Classification report : precision recall f1-score support 1 0.37 0.50 0.42 14 0 0.88 0.81 0.84 63 accuracy 0.75 77 macro avg 0.62 0.65 0.63 77 weighted avg 0.79 0.75 0.77 77 Akurasi fold ini adalah : 75.32 % Presisi fold ini adalah : 36.84 % Recall fold ini adalah : 50.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[ 7 7] [12 51]] True positive : 7 False negative : 7 False 
positive : 12 True negative : 51 Classification report : precision recall f1-score support 1 0.37 0.50 0.42 14 0 0.88 0.81 0.84 63 accuracy 0.75 77 macro avg 0.62 0.65 0.63 77 weighted avg 0.79 0.75 0.77 77 Akurasi fold ini adalah : 75.32 % Presisi fold ini adalah : 36.84 % Recall fold ini adalah : 50.0 % =========================================================== >>> FOLD KE 8 Panjang data test : 77 Panjang data train : 691 Data Test : [1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 1, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0, 1.0, 0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1, 0.0, 0.0, 1.0, 1.0, 0, 0.0, 0.0, 1, 0, 1, 0.0, 1.0, 0, 1.0, 0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0, 0, 0, 0, 1, 1.0, 0.0, 1.0, 0, 1.0, 0, 0, 0.0, 1.0, 0.0, 1, 0] Confusion matrix : [[18 7] [ 8 44]] True positive : 18 False negative : 7 False positive : 8 True negative : 44 Classification report : precision recall f1-score support 1 0.69 0.72 0.71 25 0 0.86 0.85 0.85 52 accuracy 0.81 77 macro avg 0.78 0.78 0.78 77 weighted avg 0.81 0.81 0.81 77 Akurasi fold ini adalah : 80.52 % Presisi fold ini adalah : 69.23 % Recall fold ini adalah : 72.0 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 0.9841482072697947 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.46224846486154436 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5196436942724918 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5303185432481096 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.39130748997846165 w5 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.23845659656952886 w6 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.16877180084536242 w7 
{'BMI': {28.8: 1.0, 27.8: 0.0}} 0.2467368055831519 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.15487011239127857 w9 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.09261000885578945 w10 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.08674364962054859 w11 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.06227603172806396 w12 {'Age': {29: 1.0, 28: 1.0}} 0.07694463918883722 w13 {'Age': {29: 1.0, 28: 0.0}} 0.031292415780461734 w14 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.03365186221395913 w15 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.022289551716320497 w16 {'Pregnancies': {6: 1.0, 7: 1.0}} 0.010996047299479447 w17 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.01582432864914028 w18 {'Insulin': {120: 0.0, 121: 1.0}} 0.008194564468002383 w19 {'BMI': {28.8: 1.0, 27.8: 1.0}} 0.011459215961354628 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[18 7] [ 7 45]] True positive : 18 False negative : 7 False positive : 7 True negative : 45 Classification report : precision recall f1-score support 1 0.72 0.72 0.72 25 0 0.87 0.87 0.87 52 accuracy 0.82 77 macro avg 0.79 0.79 0.79 77 weighted avg 0.82 0.82 0.82 77 Akurasi fold ini adalah : 81.82 % Presisi fold ini adalah : 72.0 % Recall fold ini adalah : 72.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[20 5] [ 4 48]] True positive : 20 False negative : 5 False positive : 4 True negative : 48 Classification report : precision recall f1-score support 1 0.83 
0.80 0.82 25 0 0.91 0.92 0.91 52 accuracy 0.88 77 macro avg 0.87 0.86 0.87 77 weighted avg 0.88 0.88 0.88 77 Akurasi fold ini adalah : 88.31 % Presisi fold ini adalah : 83.33 % Recall fold ini adalah : 80.0 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[20 5] [ 4 48]] True positive : 20 False negative : 5 False positive : 4 True negative : 48 Classification report : precision recall f1-score support 1 0.83 0.80 0.82 25 0 0.91 0.92 0.91 52 accuracy 0.88 77 macro avg 0.87 0.86 0.87 77 weighted avg 0.88 0.88 0.88 77 Akurasi fold ini adalah : 88.31 % Presisi fold ini adalah : 83.33 % Recall fold ini adalah : 80.0 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0] Confusion matrix : [[20 5] [ 4 48]] True positive : 20 False negative : 5 False positive : 4 True negative : 48 Classification report : precision recall f1-score support 1 0.83 0.80 0.82 25 0 0.91 0.92 0.91 52 accuracy 0.88 77 macro avg 0.87 0.86 0.87 77 weighted avg 0.88 0.88 0.88 77 Akurasi fold ini adalah : 88.31 % Presisi fold ini adalah : 83.33 % Recall fold ini adalah : 80.0 % =========================================================== >>> FOLD KE 9 Panjang data test : 77 Panjang data train : 691 Data Test : [0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 
1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [0.0, 0, 1.0, 0.0, 0, 0, 1.0, 0.0, 0.0, 0.0, 0, 1.0, 1.0, 0, 0, 0.0, 0, 0, 0.0, 0.0, 0.0, 0.0, 1.0, 0, 0.0, 1.0, 1.0, 0.0, 0, 1.0, 1.0, 1.0, 1.0, 0, 0, 0.0, 0.0, 0, 0, 0.0, 0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0, 1.0, 0.0, 1.0, 0] Confusion matrix : [[14 10] [11 42]] True positive : 14 False negative : 10 False positive : 11 True negative : 42 Classification report : precision recall f1-score support 1 0.56 0.58 0.57 24 0 0.81 0.79 0.80 53 accuracy 0.73 77 macro avg 0.68 0.69 0.69 77 weighted avg 0.73 0.73 0.73 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 56.0 % Recall fold ini adalah : 58.33 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 1.0434127393134984 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.44777837761030315 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.552772162274304 w3 {'Age': {29: 1.0, 28: 0.0}} 0.543792111842528 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.43442504669735427 w5 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.3176231314373438 w6 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.16713539242381212 w7 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.23893724973283714 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.16360946683035843 w9 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.07148949121830761 w10 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.07333381810420848 w11 {'Insulin': {120: 0.0, 121: 1.0}} 0.09362085805671688 w12 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.05579386210207226 w13 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.04762790249963228 w14 {'Age': {29: 1.0, 28: 0.0}} 0.04902867645740026 w15 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 1.0}} 0.03789745477763916 w16 {'BMI': {28.8: 0.0, 27.8: 0.0}} 
0.014042660671587062 w17 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.014576774546838424 w18 {'Insulin': {120: 1.0, 121: 0.0}} 0.015159819443421865 w19 {'Glucose': {128: 0.0, 127: 0.0}} 0.01510706297110345 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[16 8] [13 40]] True positive : 16 False negative : 8 False positive : 13 True negative : 40 Classification report : precision recall f1-score support 1 0.55 0.67 0.60 24 0 0.83 0.75 0.79 53 accuracy 0.73 77 macro avg 0.69 0.71 0.70 77 weighted avg 0.75 0.73 0.73 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 55.17 % Recall fold ini adalah : 66.67 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[11 13] [ 8 45]] True positive : 11 False negative : 13 False positive : 8 True negative : 45 Classification report : precision recall f1-score support 1 0.58 0.46 0.51 24 0 0.78 0.85 0.81 53 accuracy 0.73 77 macro avg 0.68 0.65 0.66 77 weighted avg 0.71 0.73 0.72 77 Akurasi fold ini adalah : 72.73 % Presisi fold ini adalah : 57.89 % Recall fold ini adalah : 45.83 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[11 13] [ 7 46]] True positive : 11 False negative : 13 False positive : 7 True negative : 46 Classification report : precision recall f1-score support 1 0.61 0.46 0.52 24 0 0.78 0.87 0.82 53 accuracy 0.74 77 macro avg 0.70 0.66 0.67 77 weighted avg 0.73 0.74 0.73 77 Akurasi fold ini adalah : 74.03 % Presisi fold ini adalah : 61.11 % Recall fold ini adalah : 45.83 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0] Confusion matrix : [[11 13] [ 7 46]] True positive : 11 False negative : 13 False positive : 7 True negative : 46 Classification report : precision recall f1-score support 1 0.61 0.46 0.52 24 0 0.78 0.87 0.82 53 accuracy 0.74 77 macro avg 0.70 0.66 0.67 77 weighted avg 0.73 0.74 0.73 77 Akurasi fold ini adalah : 74.03 % Presisi fold ini adalah : 61.11 % Recall fold ini adalah : 45.83 % =========================================================== >>> FOLD KE 10 Panjang data test : 75 Panjang data train : 693 Data Test : [1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0] ******************* Algoritma C4.5 ******************* Hasil Prediksi C45 : [1, 0, 0.0, 1.0, 0, 0, 0.0, 0, 0, 1.0, 1.0, 0.0, 0.0, 0.0, 0, 1.0, 1.0, 1.0, 0.0, 1, 0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0, 0, 0.0, 0, 1.0, 0, 0.0, 1, 0.0, 0.0, 1.0, 1, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0] Confusion matrix : [[19 11] [ 6 39]] 
True positive : 19 False negative : 11 False positive : 6 True negative : 39 Classification report : precision recall f1-score support 1 0.76 0.63 0.69 30 0 0.78 0.87 0.82 45 accuracy 0.77 75 macro avg 0.77 0.75 0.76 75 weighted avg 0.77 0.77 0.77 75 Akurasi fold ini adalah : 77.33 % Presisi fold ini adalah : 76.0 % Recall fold ini adalah : 63.33 % =========================================================== w0 {'Glucose': {128: 1.0, 127: 0.0}} 0.9954280524328791 w1 {'BMI': {28.8: 0.0, 27.8: 0.0}} 0.4960962675197953 w2 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.5793515397651382 w3 {'Age': {29: 1.0, 28: 0.0}} 0.5088250032061252 w4 {'DiabetesPedigreeFunction': {1.5270000000000001: 0.0, 0.527: 0.0}} 0.42240442841875286 w5 {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}} 0.2941688406433191 w6 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.19359852648565157 w7 {'BMI': {28.8: 1.0, 27.8: 0.0}} 0.27460468148564887 w8 {'Glucose': {128: 0.0, 127: 1.0}} 0.16007636110845413 w9 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.10368776088375188 w10 {'SkinThickness': {32: 1.0, 31: 0.0}} 0.0711609985833574 w11 {'BloodPressure': {69: 1.0, 68: 1.0}} 0.07055308712841289 w12 {'BloodPressure': {69: 0.0, 68: 1.0}} 0.07372795423874279 w13 {'Insulin': {120: 0.0, 121: 1.0}} 0.045615771971840455 w14 {'Pregnancies': {6: 1.0, 7: 1.0}} 0.03807727933399282 w15 {'Pregnancies': {6: 0.0, 7: 1.0}} 0.033227503737155786 w16 {'Insulin': {120: 1.0, 121: 1.0}} 0.01685753181252852 w17 {'Insulin': {120: 0.0, 121: 1.0}} 0.018017076974993405 w18 {'BMI': {28.8: 1.0, 27.8: 1.0}} 0.007765807218662133 w19 {'BMI': {28.8: 0.0, 27.8: 1.0}} 0.010433447997290273 ******************* Adaboost t- 5 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[21 9] [ 5 40]] True positive : 21 False negative 
: 9 False positive : 5 True negative : 40 Classification report : precision recall f1-score support 1 0.81 0.70 0.75 30 0 0.82 0.89 0.85 45 accuracy 0.81 75 macro avg 0.81 0.79 0.80 75 weighted avg 0.81 0.81 0.81 75 Akurasi fold ini adalah : 81.33 % Presisi fold ini adalah : 80.77 % Recall fold ini adalah : 70.0 % =========================================================== ******************* Adaboost t- 10 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 4 41]] True positive : 19 False negative : 11 False positive : 4 True negative : 41 Classification report : precision recall f1-score support 1 0.83 0.63 0.72 30 0 0.79 0.91 0.85 45 accuracy 0.80 75 macro avg 0.81 0.77 0.78 75 weighted avg 0.80 0.80 0.79 75 Akurasi fold ini adalah : 80.0 % Presisi fold ini adalah : 82.61 % Recall fold ini adalah : 63.33 % =========================================================== ******************* Adaboost t- 15 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 4 41]] True positive : 19 False negative : 11 False positive : 4 True negative : 41 Classification report : precision recall f1-score support 1 0.83 0.63 0.72 30 0 0.79 0.91 0.85 45 accuracy 0.80 75 macro avg 0.81 0.77 0.78 75 weighted avg 0.80 0.80 0.79 75 Akurasi fold ini adalah : 80.0 % Presisi fold ini adalah : 82.61 % Recall fold ini adalah : 63.33 % =========================================================== ******************* Adaboost t- 20 ******************* Hasil Prediksi Adaboost : [1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 
1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0] Confusion matrix : [[19 11] [ 4 41]] True positive : 19 False negative : 11 False positive : 4 True negative : 41 Classification report : precision recall f1-score support 1 0.83 0.63 0.72 30 0 0.79 0.91 0.85 45 accuracy 0.80 75 macro avg 0.81 0.77 0.78 75 weighted avg 0.80 0.80 0.79 75 Akurasi fold ini adalah : 80.0 % Presisi fold ini adalah : 82.61 % Recall fold ini adalah : 63.33 % ===========================================================
def get_colname(df):
    """Derive the metric name carried by a summary frame.

    Every column is prefixed with a 4-character method tag
    (e.g. 'C45-akurasi', 't-5 akurasi'), so dropping the first four
    characters of the first column yields the bare metric name.
    """
    first_column = df.columns[0]
    return first_column[4:]
def get_max_at_fold(df):
    """Print the best score reached in each fold and the overall best fold.

    Parameters
    ----------
    df : pd.DataFrame
        One column per method/metric (e.g. 'C45-akurasi', 't-5 akurasi'),
        one row per fold.

    Prints a Series of row-wise maxima re-labelled 1..n (fold numbers),
    then the fold number holding the overall maximum. Returns None.
    """
    print("====================================")
    print("Nilai Max keseluruhan fold :")
    print("====================================")
    # Fix: the original did `df.iloc[i].max() for i in df.index`, feeding
    # index *labels* into positional `iloc` — correct only for a default
    # RangeIndex. `df.max(axis=1)` computes the same row-wise maxima for
    # any index.
    sr = pd.Series(df.max(axis=1).to_numpy(), index=[i + 1 for i in range(len(df))])
    print(sr)
    print("\n>>> Nilai maksimal keseluruhan fold :")
    print(sr.idxmax(), "=", sr.max())
def get_mean_max_at_fold(df):
    """Print each method's mean score across all folds and the best method.

    Parameters
    ----------
    df : pd.DataFrame
        One column per method/metric, one row per fold.

    Prints a Series of per-column means (indexed by column name), then the
    name of the column with the highest mean. Returns None.
    """
    print("====================================")
    print("Nilai rata-rata keseluruhan fold :")
    print("====================================")
    # `DataFrame.mean()` already returns a Series indexed by column name,
    # replacing the hand-rolled `[df[i].mean() for i in df.columns]` loop
    # and the redundant `index=[i for i in df.columns]` copy.
    sr = df.mean()
    print(sr)
    print("\n>>> Nilai maksimal rata-rata keseluruhan fold :")
    print(sr.idxmax(), "=", sr.max())
def conclute_to_df(acc_adb, acc_c45):
    """Assemble per-fold accuracy results into a single summary DataFrame.

    Parameters
    ----------
    acc_adb : dict
        Metric name -> list of per-fold sequences, one AdaBoost score per
        entry of the module-level `sknar` (the boosting-round settings —
        defined elsewhere in this file; presumably [5, 10, 15, 20] given
        the printed output — TODO confirm).
    acc_c45 : dict
        Metric name -> list of 10 per-fold C4.5 scores.

    Returns a frame with a fold label column 'k' ('k1'..'k10'), one
    'C45-<metric>' column per metric, and one 't-<rounds> <metric>'
    column per (rounds, metric) pair.
    """
    fold_labels = ["k" + str(fold + 1) for fold in range(10)]
    sax = pd.DataFrame({'k': fold_labels})
    for metric in acc_c45:
        sax['C45-' + str(metric)] = acc_c45[metric]
        for pos, rounds in enumerate(sknar):
            column = 't-' + str(rounds) + ' ' + str(metric)
            sax[column] = [per_fold[pos] for per_fold in acc_adb[metric]]
    # sax.to_csv('noknn9.csv', index=False)
    return sax
def get_df(sax, pat):
    """Split the summary frame into one sub-frame per metric pattern.

    Parameters
    ----------
    sax : pd.DataFrame
        Summary frame whose column names embed metric names.
    pat : iterable of str
        Patterns (regexes, via `Series.str.contains`) to match against
        column names, e.g. 'akurasi', 'presisi'.

    Returns a list of DataFrames, each holding only the columns whose
    name matches the corresponding pattern.
    """
    subsets = []
    for pattern in pat:
        mask = sax.columns.str.contains(pattern)
        matching_cols = sax.columns[mask].tolist()
        subsets.append(sax.get(matching_cols))
    return subsets
def get_conclute_data(acc_adb, acc_c45):
    """Build the combined score table and print a summary for every metric.

    Delegates to ``conclute_to_df`` to merge C4.5/AdaBoost scores, splits the
    result per metric with ``get_df``, then prints each metric's table, its
    per-fold maxima and its column means.
    """
    combined = conclute_to_df(acc_adb, acc_c45)
    for metric_df in get_df(combined, acc_adb.keys()):
        metric_name = get_colname(metric_df)
        print("******************")
        print("Nilai", metric_name)
        print("******************")
        print(metric_df)
        get_max_at_fold(metric_df)
        get_mean_max_at_fold(metric_df)
# NOTE(review): wk_* accumulators are built in earlier notebook cells —
# presumably the "with KNN imputation" run; confirm against those cells.
get_conclute_data(wk_acc_adb,wk_acc_c45)
****************** Nilai akurasi ****************** C45-akurasi t-5 akurasi t-10 akurasi t-15 akurasi t-20 akurasi 0 63.64 64.94 70.13 70.13 70.13 1 74.03 74.03 77.92 77.92 77.92 2 81.82 76.62 77.92 79.22 79.22 3 67.53 66.23 70.13 67.53 70.13 4 77.92 77.92 79.22 79.22 80.52 5 81.82 77.92 81.82 81.82 81.82 6 77.92 79.22 81.82 81.82 81.82 7 81.82 84.42 85.71 85.71 85.71 8 70.13 71.43 72.73 72.73 72.73 9 81.33 81.33 78.67 78.67 78.67 ==================================== Nilai Max keseluruhan fold : ==================================== 1 70.13 2 77.92 3 81.82 4 70.13 5 80.52 6 81.82 7 81.82 8 85.71 9 72.73 10 81.33 dtype: float64 >>> Nilai maksimal keseluruhan fold : 8 = 85.71 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-akurasi 75.796 t-5 akurasi 75.406 t-10 akurasi 77.607 t-15 akurasi 77.477 t-20 akurasi 77.867 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : t-20 akurasi = 77.86699999999999 ****************** Nilai presisi ****************** C45-presisi t-5 presisi t-10 presisi t-15 presisi t-20 presisi 0 57.69 59.26 63.64 64.52 64.52 1 53.57 53.12 61.90 61.90 60.87 2 76.32 75.00 74.29 75.00 75.00 3 57.58 56.67 62.07 57.58 62.07 4 70.83 75.00 72.00 76.19 75.00 5 76.67 74.07 80.77 80.77 78.57 6 44.00 43.75 50.00 50.00 50.00 7 66.67 68.57 75.00 75.00 75.00 8 51.85 53.12 56.00 56.52 56.52 9 78.57 78.57 79.17 79.17 79.17 ==================================== Nilai Max keseluruhan fold : ==================================== 1 64.52 2 61.90 3 76.32 4 62.07 5 76.19 6 80.77 7 50.00 8 75.00 9 56.52 10 79.17 dtype: float64 >>> Nilai maksimal keseluruhan fold : 6 = 80.77 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-presisi 63.375 t-5 presisi 63.713 t-10 presisi 67.484 t-15 presisi 67.665 t-20 presisi 67.672 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : t-20 presisi = 67.672 ****************** Nilai recall 
****************** C45-recall t-5 recall t-10 recall t-15 recall t-20 recall 0 46.88 50.00 65.62 62.50 62.50 1 68.18 77.27 59.09 59.09 63.64 2 85.29 70.59 76.47 79.41 79.41 3 63.33 56.67 60.00 63.33 60.00 4 62.96 55.56 66.67 59.26 66.67 5 76.67 66.67 70.00 70.00 73.33 6 78.57 50.00 78.57 78.57 78.57 7 88.00 96.00 84.00 84.00 84.00 8 58.33 70.83 58.33 54.17 54.17 9 73.33 73.33 63.33 63.33 63.33 ==================================== Nilai Max keseluruhan fold : ==================================== 1 65.62 2 77.27 3 85.29 4 63.33 5 66.67 6 76.67 7 78.57 8 96.00 9 70.83 10 73.33 dtype: float64 >>> Nilai maksimal keseluruhan fold : 8 = 96.0 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-recall 70.154 t-5 recall 66.692 t-10 recall 68.208 t-15 recall 67.366 t-20 recall 68.562 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : C45-recall = 70.15400000000001
# NOTE(review): nk_* accumulators are built in earlier notebook cells —
# presumably the "no KNN imputation" run; confirm against those cells.
get_conclute_data(nk_acc_adb,nk_acc_c45)
****************** Nilai akurasi ****************** C45-akurasi t-5 akurasi t-10 akurasi t-15 akurasi t-20 akurasi 0 63.64 63.64 71.43 71.43 71.43 1 71.43 76.62 77.92 77.92 77.92 2 74.03 76.62 79.22 79.22 79.22 3 62.34 66.23 67.53 67.53 67.53 4 76.62 76.62 79.22 79.22 79.22 5 75.32 77.92 79.22 80.52 81.82 6 72.73 76.62 74.03 75.32 75.32 7 80.52 81.82 88.31 88.31 88.31 8 72.73 72.73 72.73 74.03 74.03 9 77.33 81.33 80.00 80.00 80.00 ==================================== Nilai Max keseluruhan fold : ==================================== 1 71.43 2 77.92 3 79.22 4 67.53 5 79.22 6 81.82 7 76.62 8 88.31 9 74.03 10 81.33 dtype: float64 >>> Nilai maksimal keseluruhan fold : 8 = 88.31 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-akurasi 72.669 t-5 akurasi 75.015 t-10 akurasi 76.961 t-15 akurasi 77.350 t-20 akurasi 77.480 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : t-20 akurasi = 77.47999999999999 ****************** Nilai presisi ****************** C45-presisi t-5 presisi t-10 presisi t-15 presisi t-20 presisi 0 59.09 57.14 69.23 70.83 70.83 1 50.00 57.69 64.71 64.71 64.71 2 68.42 71.05 82.14 82.14 82.14 3 51.72 56.67 60.87 60.87 60.87 4 69.57 69.57 76.19 76.19 76.19 5 68.97 72.41 81.82 85.71 90.00 6 36.00 42.86 33.33 36.84 36.84 7 69.23 72.00 83.33 83.33 83.33 8 56.00 55.17 57.89 61.11 61.11 9 76.00 80.77 82.61 82.61 82.61 ==================================== Nilai Max keseluruhan fold : ==================================== 1 70.83 2 64.71 3 82.14 4 60.87 5 76.19 6 90.00 7 42.86 8 83.33 9 61.11 10 82.61 dtype: float64 >>> Nilai maksimal keseluruhan fold : 6 = 90.0 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-presisi 60.500 t-5 presisi 63.533 t-10 presisi 69.212 t-15 presisi 70.434 t-20 presisi 70.863 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : t-20 presisi = 70.863 ****************** Nilai recall 
****************** C45-recall t-5 recall t-10 recall t-15 recall t-20 recall 0 40.62 50.00 56.25 53.12 53.12 1 54.55 68.18 50.00 50.00 50.00 2 76.47 79.41 67.65 67.65 67.65 3 50.00 56.67 46.67 46.67 46.67 4 59.26 59.26 59.26 59.26 59.26 5 66.67 70.00 60.00 60.00 60.00 6 64.29 85.71 42.86 50.00 50.00 7 72.00 72.00 80.00 80.00 80.00 8 58.33 66.67 45.83 45.83 45.83 9 63.33 70.00 63.33 63.33 63.33 ==================================== Nilai Max keseluruhan fold : ==================================== 1 56.25 2 68.18 3 79.41 4 56.67 5 59.26 6 70.00 7 85.71 8 80.00 9 66.67 10 70.00 dtype: float64 >>> Nilai maksimal keseluruhan fold : 7 = 85.71 ==================================== Nilai rata-rata keseluruhan fold : ==================================== C45-recall 60.552 t-5 recall 67.790 t-10 recall 57.185 t-15 recall 57.586 t-20 recall 57.586 dtype: float64 >>> Nilai maksimal rata-rata keseluruhan fold : t-5 recall = 67.78999999999999
{'Glucose': {127: {'Age': {28: {'Pregnancies': {6: {'BMI': {28.8: {'DiabetesPedigreeFunction': {0.527: {'Insulin': {120: {'BloodPressure': {69: {'SkinThickness': {32: 0.0, 31: 0.0}}, 68: {'SkinThickness': {31: 0.0, 32: 0.0}}}}, 121: {'BloodPressure': {69: 0, 68: {'SkinThickness': {32: 0.0, 31: 0.0}}}}}}, 1.5270000000000001: {'SkinThickness': {31: {'Insulin': {121: 0, 120: {'BloodPressure': {68: 0.0, 69: 0.0}}}}, 32: {'BloodPressure': {68: {'Insulin': {120: 0.0, 121: 1.0}}, 69: 0}}}}}}, 27.8: {'DiabetesPedigreeFunction': {0.527: 0, 1.5270000000000001: {'BloodPressure': {68: {'Insulin': {120: 0.0, 121: 0.0}}, 69: 0}}}}}}, 7: 1}}, 29: {'Insulin': {120: {'BMI': {28.8: {'DiabetesPedigreeFunction': {1.5270000000000001: {'Pregnancies': {7: {'SkinThickness': {31: {'BloodPressure': {69: 0.0, 68: 0.0}}, 32: {'SkinThickness': {32: 1.0}}}}, 6: {'BloodPressure': {69: {'SkinThickness': {31: 0.0, 32: 0.0}}, 68: {'SkinThickness': {32: 0.0, 31: 0.0}}}}}}, 0.527: {'SkinThickness': {32: {'BloodPressure': {69: {'Pregnancies': {7: 0.0, 6: 0.0}}, 68: {'Pregnancies': {6: 0.0, 7: 0.0}}}}, 31: {'BloodPressure': {69: {'Pregnancies': {6: 0.0, 7: 0.0}}, 68: {'Pregnancies': {7: 0.0, 6: 0.0}}}}}}}}, 27.8: {'SkinThickness': {31: {'DiabetesPedigreeFunction': {0.527: {'BloodPressure': {68: {'Pregnancies': {6: 0.0, 7: 0.0}}, 69: {'Pregnancies': {6: 0.0, 7: 0.0}}}}, 1.5270000000000001: {'BloodPressure': {68: {'Pregnancies': {6: 0.0, 7: 1.0}}, 69: {'Pregnancies': {6: 0.0, 7: 0.0}}}}}}, 32: 0}}}}, 121: {'DiabetesPedigreeFunction': {0.527: {'BMI': {28.8: {'BloodPressure': {69: {'SkinThickness': {31: {'Pregnancies': {6: 0.0, 7: 1.0}}, 32: {'Pregnancies': {6: 0.0, 7: 0.0}}}}, 68: {'SkinThickness': {31: 0.0, 32: 0.0}}}}, 27.8: 0}}, 1.5270000000000001: {'Pregnancies': {6: {'BMI': {28.8: {'SkinThickness': {31: 1, 32: {'BloodPressure': {69: 0.0, 68: 1.0}}}}, 27.8: {'BMI': {27.8: 0.0}}}}, 7: 1}}}}}}}}, 128: {'BMI': {28.8: {'Pregnancies': {6: {'Age': {28: {'SkinThickness': {31: {'DiabetesPedigreeFunction': 
{0.527: {'Insulin': {120: {'BloodPressure': {68: 1.0, 69: 0.0}}, 121: {'Insulin': {121: 0.0}}}}, 1.5270000000000001: {'Insulin': {121: {'BloodPressure': {68: 1.0, 69: 0.0}}, 120: {'BloodPressure': {69: 1.0, 68: 0.0}}}}}}, 32: {'BloodPressure': {69: {'Insulin': {121: {'DiabetesPedigreeFunction': {0.527: 0.0, 1.5270000000000001: 0.0}}, 120: {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 1.0}}}}, 68: {'DiabetesPedigreeFunction': {1.5270000000000001: {'Insulin': {120: 0.0, 121: 1.0}}, 0.527: 1}}}}}}, 29: {'BloodPressure': {69: {'DiabetesPedigreeFunction': {0.527: {'SkinThickness': {31: {'Insulin': {120: 1.0, 121: 1.0}}, 32: {'Insulin': {120: 0.0, 121: 1.0}}}}, 1.5270000000000001: {'Insulin': {120: {'SkinThickness': {31: 1.0, 32: 1.0}}, 121: {'SkinThickness': {31: 1.0, 32: 0.0}}}}}}, 68: {'SkinThickness': {31: {'Insulin': {121: {'DiabetesPedigreeFunction': {0.527: 1.0, 1.5270000000000001: 0.0}}, 120: {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 1.0}}}}, 32: 1}}}}}}, 7: {'DiabetesPedigreeFunction': {0.527: {'BloodPressure': {69: {'SkinThickness': {32: {'Insulin': {120: 1.0, 121: 1.0}}, 31: {'Insulin': {120: 1.0, 121: 0.0}}}}, 68: 1}}, 1.5270000000000001: {'SkinThickness': {31: 1, 32: {'Insulin': {121: {'BloodPressure': {68: 1.0, 69: 1.0}}, 120: 1}}}}}}}}, 27.8: {'SkinThickness': {31: {'DiabetesPedigreeFunction': {0.527: {'Pregnancies': {6: {'Insulin': {120: {'BloodPressure': {69: {'Age': {29: 0.0, 28: 0.0}}, 68: {'Age': {28: 1.0, 29: 0.0}}}}, 121: {'Age': {28: 0, 29: {'BloodPressure': {68: 0.0, 69: 1.0}}}}}}, 7: {'Insulin': {121: 1, 120: {'BloodPressure': {68: 0.0, 69: 0.0}}}}}}, 1.5270000000000001: {'Pregnancies': {6: 0, 7: {'Insulin': {121: 0.0, 120: 0.0}}}}}}, 32: {'Insulin': {120: {'DiabetesPedigreeFunction': {1.5270000000000001: 1.0, 0.527: 0.0}}, 121: 1}}}}}}}}
# Inspect one element of the adatree structure built earlier in the notebook;
# the cell output below shows a single-split tree dict — presumably the first
# weak learner of the first fold's ensemble (confirm adatree's layout).
adatree[0][0][0]
{'Glucose': {127: 0.0, 128: 1.0}}
# Inspect the companion scalar stored next to that tree — presumably the
# weak learner's AdaBoost vote weight (alpha); confirm where adatree is built.
adatree[0][0][1]
1.0889875752939042